var/home/core/zuul-output/logs/kubelet.log
Dec 05 11:07:44 crc systemd[1]: Starting Kubernetes Kubelet...
Dec 05 11:07:44 crc restorecon[4713]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 11:07:44 crc restorecon[4713]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 11:07:45 crc restorecon[4713]:
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 11:07:45 crc 
restorecon[4713]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 11:07:45 crc restorecon[4713]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 11:07:45 crc restorecon[4713]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 11:07:45 crc 
restorecon[4713]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc 
restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc 
restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 11:07:45 
crc restorecon[4713]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 
11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 11:07:45 crc restorecon[4713]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 05 11:07:45 crc restorecon[4713]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 11:07:45 crc restorecon[4713]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 11:07:45 crc restorecon[4713]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 11:07:45 crc restorecon[4713]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: 
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 
11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 11:07:45 crc 
restorecon[4713]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 11:07:45 crc restorecon[4713]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Dec 05 11:07:45 crc restorecon[4713]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 05 11:07:45 crc restorecon[4713]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 11:07:45 crc restorecon[4713]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 
11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]:
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 11:07:45 crc restorecon[4713]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 11:07:45 crc restorecon[4713]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 11:07:45 crc restorecon[4713]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0
Dec 05 11:07:45 crc kubenswrapper[4728]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 05 11:07:45 crc kubenswrapper[4728]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version.
Dec 05 11:07:45 crc kubenswrapper[4728]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 05 11:07:45 crc kubenswrapper[4728]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
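The restorecon run above is a node-wide relabel in which nearly every kubelet path was left alone because a local SELinux file-context customization takes precedence over the policy default (that is what "not reset as customized by admin" means), while only a few files, such as kubenswrapper, were actually relabeled. A minimal sketch for inspecting that state on the node, assuming root access and the stock policycoreutils/semanage tooling rather than anything specific to this host:

    # Show only local file-context customizations; these are the rules that
    # make restorecon report paths as "customized by admin" (-C = local only).
    semanage fcontext -l -C

    # Print the label SELinux policy expects for a given path.
    matchpathcon /var/lib/kubelet/device-plugins/kubelet.sock

    # Dry-run recursive relabel of the kubelet state directory:
    # -n = change nothing, -v = report would-be changes, -R = recurse.
    restorecon -Rnv /var/lib/kubelet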
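The four Flag deprecation warnings just above (and the --pod-infra-container-image and --system-reserved warnings that follow) all point at the same remedy: carry the setting in the KubeletConfiguration file named by --config instead of on the command line; --minimum-container-ttl-duration is the exception, superseded by the eviction settings per its own warning. A minimal migration sketch, assuming a CRI-O socket path conventional for this kind of node; the file path, taint, and reservation sizes are illustrative, not read from this host:

    # Hypothetical config fragment; all values below are illustrative only.
    cat <<'EOF' > /etc/kubernetes/kubelet-config-fragment.yaml
    apiVersion: kubelet.config.k8s.io/v1beta1
    kind: KubeletConfiguration
    # replaces --container-runtime-endpoint (assumed CRI-O socket)
    containerRuntimeEndpoint: unix:///var/run/crio/crio.sock
    # replaces --volume-plugin-dir
    volumePluginDir: /etc/kubernetes/kubelet-plugins/volume/exec
    # replaces --register-with-taints (illustrative taint)
    registerWithTaints:
    - key: node-role.kubernetes.io/master
      effect: NoSchedule
    # replaces --system-reserved (illustrative sizes)
    systemReserved:
      cpu: 500m
      memory: 1Gi
    EOF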
Dec 05 11:07:45 crc kubenswrapper[4728]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI.
Dec 05 11:07:45 crc kubenswrapper[4728]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.962960 4728 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime"
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965651 4728 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965669 4728 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965674 4728 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965678 4728 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965683 4728 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965687 4728 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965692 4728 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965696 4728 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965700 4728 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965703 4728 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965707 4728 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965710 4728 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965714 4728 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965717 4728 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965721 4728 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965725 4728 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965728 4728 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965732 4728 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965735 4728 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965739 4728 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965742 4728 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965745 4728 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965749 4728 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965753 4728 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965763 4728 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965767 4728 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965770 4728 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965774 4728 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965779 4728 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965783 4728 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965790 4728 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965794 4728 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965810 4728 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965814 4728 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965818 4728 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965821 4728 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965826 4728 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965831 4728 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965836 4728 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965840 4728 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965845 4728 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965850 4728 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965853 4728 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965857 4728 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965860 4728 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965864 4728 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965868 4728 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965871 4728 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965875 4728 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965878 4728 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965882 4728 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965886 4728 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965889 4728 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965892 4728 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965896 4728 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965899 4728 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965902 4728 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965906 4728 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965909 4728 feature_gate.go:330] unrecognized feature gate: Example
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965913 4728 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965923 4728 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965926 4728 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965930 4728 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965934 4728 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965939 4728 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
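[Note] The two deprecation warnings at the top of this boot concern flags that have moved into the kubelet config file. As a minimal sketch (assuming the usual kubelet.config.k8s.io/v1beta1 schema, with only an illustrative subset of fields), the --system-reserved values dumped later in this log would live in a KubeletConfiguration document, which --config accepts as YAML or JSON:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Sketch of a KubeletConfiguration document, assuming the
// kubelet.config.k8s.io/v1beta1 schema; only the field replacing the
// deprecated --system-reserved flag seen in this log is shown.
type KubeletConfiguration struct {
	APIVersion     string            `json:"apiVersion"`
	Kind           string            `json:"kind"`
	SystemReserved map[string]string `json:"systemReserved,omitempty"`
}

func main() {
	cfg := KubeletConfiguration{
		APIVersion: "kubelet.config.k8s.io/v1beta1",
		Kind:       "KubeletConfiguration",
		// Values taken from the --system-reserved flag dump below.
		SystemReserved: map[string]string{
			"cpu":               "200m",
			"memory":            "350Mi",
			"ephemeral-storage": "350Mi",
		},
	}
	out, err := json.MarshalIndent(cfg, "", "  ")
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out)) // the kubelet's --config file may be JSON or YAML
}
```

On this node the --config flag below points at /etc/kubernetes/kubelet.conf, which is where such settings would land.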
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965943 4728 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965948 4728 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965952 4728 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965956 4728 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965960 4728 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.965964 4728 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966343 4728 flags.go:64] FLAG: --address="0.0.0.0"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966353 4728 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966360 4728 flags.go:64] FLAG: --anonymous-auth="true"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966365 4728 flags.go:64] FLAG: --application-metrics-count-limit="100"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966370 4728 flags.go:64] FLAG: --authentication-token-webhook="false"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966374 4728 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966380 4728 flags.go:64] FLAG: --authorization-mode="AlwaysAllow"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966385 4728 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966389 4728 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966393 4728 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966398 4728 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966402 4728 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966406 4728 flags.go:64] FLAG: --cgroup-driver="cgroupfs"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966410 4728 flags.go:64] FLAG: --cgroup-root=""
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966414 4728 flags.go:64] FLAG: --cgroups-per-qos="true"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966418 4728 flags.go:64] FLAG: --client-ca-file=""
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966422 4728 flags.go:64] FLAG: --cloud-config=""
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966426 4728 flags.go:64] FLAG: --cloud-provider=""
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966429 4728 flags.go:64] FLAG: --cluster-dns="[]"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966439 4728 flags.go:64] FLAG: --cluster-domain=""
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966443 4728 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966447 4728 flags.go:64] FLAG: --config-dir=""
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966451 4728 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966455 4728 flags.go:64] FLAG: --container-log-max-files="5"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966461 4728 flags.go:64] FLAG: --container-log-max-size="10Mi"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966474 4728 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966479 4728 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966483 4728 flags.go:64] FLAG: --containerd-namespace="k8s.io"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966487 4728 flags.go:64] FLAG: --contention-profiling="false"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966491 4728 flags.go:64] FLAG: --cpu-cfs-quota="true"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966495 4728 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966499 4728 flags.go:64] FLAG: --cpu-manager-policy="none"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966504 4728 flags.go:64] FLAG: --cpu-manager-policy-options=""
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966508 4728 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966512 4728 flags.go:64] FLAG: --enable-controller-attach-detach="true"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966517 4728 flags.go:64] FLAG: --enable-debugging-handlers="true"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966520 4728 flags.go:64] FLAG: --enable-load-reader="false"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966525 4728 flags.go:64] FLAG: --enable-server="true"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966528 4728 flags.go:64] FLAG: --enforce-node-allocatable="[pods]"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966536 4728 flags.go:64] FLAG: --event-burst="100"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966541 4728 flags.go:64] FLAG: --event-qps="50"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966545 4728 flags.go:64] FLAG: --event-storage-age-limit="default=0"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966549 4728 flags.go:64] FLAG: --event-storage-event-limit="default=0"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966553 4728 flags.go:64] FLAG: --eviction-hard=""
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966558 4728 flags.go:64] FLAG: --eviction-max-pod-grace-period="0"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966562 4728 flags.go:64] FLAG: --eviction-minimum-reclaim=""
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966566 4728 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966570 4728 flags.go:64] FLAG: --eviction-soft=""
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966574 4728 flags.go:64] FLAG: --eviction-soft-grace-period=""
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966578 4728 flags.go:64] FLAG: --exit-on-lock-contention="false"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966582 4728 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966586 4728 flags.go:64] FLAG: --experimental-mounter-path=""
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966590 4728 flags.go:64] FLAG: --fail-cgroupv1="false"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966594 4728 flags.go:64] FLAG: --fail-swap-on="true"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966598 4728 flags.go:64] FLAG: --feature-gates=""
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966602 4728 flags.go:64] FLAG: --file-check-frequency="20s"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966606 4728 flags.go:64] FLAG: --global-housekeeping-interval="1m0s"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966610 4728 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966615 4728 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966619 4728 flags.go:64] FLAG: --healthz-port="10248"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966623 4728 flags.go:64] FLAG: --help="false"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966633 4728 flags.go:64] FLAG: --hostname-override=""
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966637 4728 flags.go:64] FLAG: --housekeeping-interval="10s"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966642 4728 flags.go:64] FLAG: --http-check-frequency="20s"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966647 4728 flags.go:64] FLAG: --image-credential-provider-bin-dir=""
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966651 4728 flags.go:64] FLAG: --image-credential-provider-config=""
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966655 4728 flags.go:64] FLAG: --image-gc-high-threshold="85"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966659 4728 flags.go:64] FLAG: --image-gc-low-threshold="80"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966663 4728 flags.go:64] FLAG: --image-service-endpoint=""
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966666 4728 flags.go:64] FLAG: --kernel-memcg-notification="false"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966670 4728 flags.go:64] FLAG: --kube-api-burst="100"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966674 4728 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966678 4728 flags.go:64] FLAG: --kube-api-qps="50"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966682 4728 flags.go:64] FLAG: --kube-reserved=""
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966686 4728 flags.go:64] FLAG: --kube-reserved-cgroup=""
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966691 4728 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966696 4728 flags.go:64] FLAG: --kubelet-cgroups=""
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966700 4728 flags.go:64] FLAG: --local-storage-capacity-isolation="true"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966704 4728 flags.go:64] FLAG: --lock-file=""
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966708 4728 flags.go:64] FLAG: --log-cadvisor-usage="false"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966712 4728 flags.go:64] FLAG: --log-flush-frequency="5s"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966716 4728 flags.go:64] FLAG: --log-json-info-buffer-size="0"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966722 4728 flags.go:64] FLAG: --log-json-split-stream="false"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966726 4728 flags.go:64] FLAG: --log-text-info-buffer-size="0"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966730 4728 flags.go:64] FLAG: --log-text-split-stream="false"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966734 4728 flags.go:64] FLAG: --logging-format="text"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966738 4728 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966743 4728 flags.go:64] FLAG: --make-iptables-util-chains="true"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966747 4728 flags.go:64] FLAG: --manifest-url=""
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966757 4728 flags.go:64] FLAG: --manifest-url-header=""
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966763 4728 flags.go:64] FLAG: --max-housekeeping-interval="15s"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966767 4728 flags.go:64] FLAG: --max-open-files="1000000"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966772 4728 flags.go:64] FLAG: --max-pods="110"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966776 4728 flags.go:64] FLAG: --maximum-dead-containers="-1"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966780 4728 flags.go:64] FLAG: --maximum-dead-containers-per-container="1"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966784 4728 flags.go:64] FLAG: --memory-manager-policy="None"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966792 4728 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966817 4728 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966821 4728 flags.go:64] FLAG: --node-ip="192.168.126.11"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966825 4728 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966835 4728 flags.go:64] FLAG: --node-status-max-images="50"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966839 4728 flags.go:64] FLAG: --node-status-update-frequency="10s"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966843 4728 flags.go:64] FLAG: --oom-score-adj="-999"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966847 4728 flags.go:64] FLAG: --pod-cidr=""
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966851 4728 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966857 4728 flags.go:64] FLAG: --pod-manifest-path=""
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966861 4728 flags.go:64] FLAG: --pod-max-pids="-1"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966865 4728 flags.go:64] FLAG: --pods-per-core="0"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966869 4728 flags.go:64] FLAG: --port="10250"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966873 4728 flags.go:64] FLAG: --protect-kernel-defaults="false"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966877 4728 flags.go:64] FLAG: --provider-id=""
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966881 4728 flags.go:64] FLAG: --qos-reserved=""
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966885 4728 flags.go:64] FLAG: --read-only-port="10255"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966889 4728 flags.go:64] FLAG: --register-node="true"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966893 4728 flags.go:64] FLAG: --register-schedulable="true"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966897 4728 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966903 4728 flags.go:64] FLAG: --registry-burst="10"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966907 4728 flags.go:64] FLAG: --registry-qps="5"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966911 4728 flags.go:64] FLAG: --reserved-cpus=""
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966915 4728 flags.go:64] FLAG: --reserved-memory=""
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966920 4728 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966924 4728 flags.go:64] FLAG: --root-dir="/var/lib/kubelet"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966928 4728 flags.go:64] FLAG: --rotate-certificates="false"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966932 4728 flags.go:64] FLAG: --rotate-server-certificates="false"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966935 4728 flags.go:64] FLAG: --runonce="false"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966939 4728 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966943 4728 flags.go:64] FLAG: --runtime-request-timeout="2m0s"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966947 4728 flags.go:64] FLAG: --seccomp-default="false"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966952 4728 flags.go:64] FLAG: --serialize-image-pulls="true"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966956 4728 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966960 4728 flags.go:64] FLAG: --storage-driver-db="cadvisor"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966964 4728 flags.go:64] FLAG: --storage-driver-host="localhost:8086"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966968 4728 flags.go:64] FLAG: --storage-driver-password="root"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966978 4728 flags.go:64] FLAG: --storage-driver-secure="false"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966983 4728 flags.go:64] FLAG: --storage-driver-table="stats"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966986 4728 flags.go:64] FLAG: --storage-driver-user="root"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966990 4728 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966994 4728 flags.go:64] FLAG: --sync-frequency="1m0s"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.966998 4728 flags.go:64] FLAG: --system-cgroups=""
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.967002 4728 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.967009 4728 flags.go:64] FLAG: --system-reserved-cgroup=""
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.967013 4728 flags.go:64] FLAG: --tls-cert-file=""
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.967017 4728 flags.go:64] FLAG: --tls-cipher-suites="[]"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.967026 4728 flags.go:64] FLAG: --tls-min-version=""
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.967030 4728 flags.go:64] FLAG: --tls-private-key-file=""
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.967035 4728 flags.go:64] FLAG: --topology-manager-policy="none"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.967039 4728 flags.go:64] FLAG: --topology-manager-policy-options=""
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.967043 4728 flags.go:64] FLAG: --topology-manager-scope="container"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.967047 4728 flags.go:64] FLAG: --v="2"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.967052 4728 flags.go:64] FLAG: --version="false"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.967058 4728 flags.go:64] FLAG: --vmodule=""
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.967063 4728 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.967067 4728 flags.go:64] FLAG: --volume-stats-agg-period="1m0s"
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967192 4728 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967197 4728 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967201 4728 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967206 4728 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967210 4728 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967214 4728 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967218 4728 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967221 4728 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967225 4728 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967229 4728 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967233 4728 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967236 4728 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967239 4728 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967243 4728 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967247 4728 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967250 4728 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967260 4728 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967264 4728 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967268 4728 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967273 4728 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967276 4728 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967280 4728 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967284 4728 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967287 4728 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967293 4728 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967297 4728 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967300 4728 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967304 4728 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967308 4728 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967313 4728 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967317 4728 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967321 4728 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
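[Note] The flags.go:64 block above is the kubelet's verbose dump of every registered command-line flag with its effective value, defaults included. A minimal sketch of the same pattern with Go's standard flag package (the two flags registered here are placeholders, not the kubelet's real registry):

```go
package main

import (
	"flag"
	"fmt"
)

func main() {
	// Placeholder flags standing in for the kubelet's much larger registry.
	flag.Int("max-pods", 110, "maximum number of pods per node")
	flag.String("node-ip", "", "IP address of the node")
	flag.Parse()

	// Walk every registered flag, set or not, and print "FLAG: --name=value",
	// matching the shape of the flags.go:64 lines above.
	flag.VisitAll(func(f *flag.Flag) {
		fmt.Printf("FLAG: --%s=%q\n", f.Name, f.Value.String())
	})
}
```

Using VisitAll rather than Visit is what makes unset defaults such as --cgroup-root="" appear in the dump.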
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967326 4728 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967329 4728 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967333 4728 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967337 4728 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967341 4728 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967345 4728 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967349 4728 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967352 4728 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967356 4728 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967360 4728 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967363 4728 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967366 4728 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967370 4728 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967374 4728 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967377 4728 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967380 4728 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967384 4728 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967387 4728 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967391 4728 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967395 4728 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967405 4728 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967408 4728 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967412 4728 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967415 4728 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967420 4728 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967424 4728 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967427 4728 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967431 4728 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967434 4728 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967437 4728 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967441 4728 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967444 4728 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967448 4728 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967452 4728 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967456 4728 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967460 4728 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967464 4728 feature_gate.go:330] unrecognized feature gate: Example
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967468 4728 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.967472 4728 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.967478 4728 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.977404 4728 server.go:491] "Kubelet version" kubeletVersion="v1.31.5"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.977439 4728 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.977672 4728 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.977688 4728 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.977693 4728 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.977698 4728 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.977703 4728 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.977708 4728 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.977712 4728 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
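[Note] The warning/summary pattern above repeats several times during startup: feature_gate.go:330 warns on gate names missing from this component's registry (here, OpenShift operator-level gates the kubelet does not know), :351 and :353 warn on deprecated and GA gates, and :386 prints the merged result. A minimal sketch of that classify-then-merge loop, with a three-entry registry standing in for the real generated table:

```go
package main

import "fmt"

type gateState int

const (
	alpha gateState = iota
	ga
	deprecated
)

func main() {
	// Illustrative subset of a known-gate registry; the real table is much larger.
	known := map[string]gateState{
		"KMSv1":                     deprecated,
		"ValidatingAdmissionPolicy": ga,
		"NodeSwap":                  alpha,
	}
	requested := map[string]bool{
		"KMSv1":                     true,
		"ValidatingAdmissionPolicy": true,
		"GatewayAPI":                true, // unknown to this registry
	}

	effective := map[string]bool{}
	for name, enabled := range requested {
		state, ok := known[name]
		if !ok {
			// cf. feature_gate.go:330 — warn and drop, do not fail startup.
			fmt.Printf("W unrecognized feature gate: %s\n", name)
			continue
		}
		switch state {
		case ga:
			fmt.Printf("W Setting GA feature gate %s=%v. It will be removed in a future release.\n", name, enabled)
		case deprecated:
			fmt.Printf("W Setting deprecated feature gate %s=%v. It will be removed in a future release.\n", name, enabled)
		}
		effective[name] = enabled
	}
	fmt.Printf("I feature gates: %v\n", effective) // cf. feature_gate.go:386
}
```

The property visible in this log is that unknown names only warn; they are skipped rather than aborting the kubelet.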
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.977766 4728 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.977772 4728 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.978190 4728 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.978253 4728 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.978314 4728 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.978372 4728 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.978429 4728 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.978485 4728 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.978602 4728 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.978674 4728 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.978680 4728 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.978685 4728 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.978689 4728 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.978724 4728 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.979142 4728 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.979200 4728 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.979263 4728 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.979320 4728 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.979373 4728 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.979425 4728 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.979476 4728 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.979529 4728 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.979581 4728 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.979638 4728 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.979689 4728 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.979733 4728 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.979781 4728 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.980002 4728 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.980060 4728 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.980151 4728 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.980214 4728 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.980262 4728 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.980304 4728 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.980350 4728 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.980391 4728 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.980432 4728 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.980472 4728 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.980520 4728 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.980563 4728 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.980604 4728 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.980659 4728 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.980705 4728 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.980750 4728 feature_gate.go:330] unrecognized feature gate: Example
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.980821 4728 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.980874 4728 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.980918 4728 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.980961 4728 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.981017 4728 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.981063 4728 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.981114 4728 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.981158 4728 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.981200 4728 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.981250 4728 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.981301 4728 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.981357 4728 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.981416 4728 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.981481 4728 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.981537 4728 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.981593 4728 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.981648 4728 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.981701 4728 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.981759 4728 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.981834 4728 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.981908 4728 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.981979 4728 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.982205 4728 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.982272 4728 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.982330 4728 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.982390 4728 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.982451 4728 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.982510 4728 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.982564 4728 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.982621 4728 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.982677 4728 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.982734 4728 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.982792 4728 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.982878 4728 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.982937 4728 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.982998 4728 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.983058 4728 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.983118 4728 feature_gate.go:330] unrecognized feature gate: Example
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.983175 4728 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.983229 4728 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.983288 4728 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.983343 4728 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.983406 4728 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.983463 4728 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.983517 4728 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.983570 4728 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.983623 4728 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.983685 4728 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.983744 4728 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.983911 4728 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.983978 4728 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.984036 4728 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.984114 4728 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.984186 4728 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.984247 4728 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.984304 4728 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.984359 4728 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.984415 4728 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.984472 4728 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.984531 4728 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.984584 4728 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.984644 4728 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.984702 4728 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.984760 4728 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.984845 4728 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.984906 4728 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.984962 4728 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.985028 4728 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.985085 4728 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.985148 4728 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.985207 4728 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.985264 4728 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.985325 4728 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.985380 4728 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.985441 4728 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.985497 4728 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.985550 4728 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.985604 4728 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.985667 4728 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.985724 4728 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.985778 4728 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.985864 4728 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.985924 4728 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.985979 4728 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.986039 4728 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.986092 4728 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.986146 4728 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.986212 4728 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.986271 4728 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.986325 4728 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.986381 4728 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.986435 4728 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 05 11:07:45 crc kubenswrapper[4728]: W1205 11:07:45.986488 4728 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.986548 4728 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.986835 4728 server.go:940] "Client rotation is on, will bootstrap in background"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.990106 4728 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.990269 4728 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem".
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.990884 4728 server.go:997] "Starting client certificate rotation"
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.990980 4728 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.991377 4728 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-11-14 22:36:43.976441065 +0000 UTC
Dec 05 11:07:45 crc kubenswrapper[4728]: I1205 11:07:45.991477 4728 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates
Dec 05 11:07:45 crc kubenswrapper[4728]: E1205 11:07:45.998249 4728 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.146:6443: connect: connection refused" logger="UnhandledError"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.019919 4728 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.021712 4728 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.029071 4728 log.go:25] "Validated CRI v1 runtime API"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.042365 4728 log.go:25] "Validated CRI v1 image API"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.045957 4728 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.048127 4728 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-12-05-11-03-27-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
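[Note] certificate_manager.go:356 above reports both the client certificate's expiry and a rotation deadline well ahead of it; client-go draws that deadline at a jittered point late in the certificate's validity window, and the CSR POST then fails only because api-int.crc.testing:6443 is not yet reachable this early in boot. A minimal sketch of reading the same PEM pair and computing such a deadline (same path as certificate_store.go:130 logs; the 70-90% jitter policy is an assumption about client-go's behavior, not lifted from its source):

```go
package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"math/rand"
	"os"
	"time"
)

func main() {
	// Same on-disk location the kubelet logs at certificate_store.go:130.
	data, err := os.ReadFile("/var/lib/kubelet/pki/kubelet-client-current.pem")
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		return
	}
	// The file concatenates certificate and key; take the CERTIFICATE block.
	var cert *x509.Certificate
	for block, rest := pem.Decode(data); block != nil; block, rest = pem.Decode(rest) {
		if block.Type == "CERTIFICATE" {
			if cert, err = x509.ParseCertificate(block.Bytes); err != nil {
				fmt.Fprintln(os.Stderr, err)
				return
			}
			break
		}
	}
	if cert == nil {
		fmt.Fprintln(os.Stderr, "no CERTIFICATE block found")
		return
	}
	total := cert.NotAfter.Sub(cert.NotBefore)
	// Jittered rotation deadline in the 70-90% span of validity (assumed policy).
	deadline := cert.NotBefore.Add(time.Duration(float64(total) * (0.7 + 0.2*rand.Float64())))
	fmt.Printf("Certificate expiration is %s, rotation deadline is %s\n", cert.NotAfter, deadline)
}
```

The jitter is what spreads renewals out so a fleet of kubelets does not hammer the CSR API at the same instant.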
kubenswrapper[4728]: I1205 11:07:46.048159 4728 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}] Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.067273 4728 manager.go:217] Machine: {Timestamp:2025-12-05 11:07:46.065815097 +0000 UTC m=+0.207937810 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654128640 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:65b68dc7-92a1-4fa1-bbc7-423a936860c6 BootID:feb38e4d-326c-4c7a-a272-95e0ac54f009 Filesystems:[{Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827064320 Type:vfs Inodes:4108170 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365412864 Type:vfs Inodes:821634 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108170 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:e1:09:94 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:e1:09:94 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:c0:b3:3a Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:6d:90:58 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:e8:8a:25 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:98:b3:18 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:ce:42:2b:d2:26:cd Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:f2:b7:b6:a4:a4:2b Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654128640 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 
Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.067597 4728 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. 
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.067759 4728 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.068449 4728 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.068697 4728 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.068739 4728 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.069026 4728 topology_manager.go:138] "Creating topology manager with none policy"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.069036 4728 container_manager_linux.go:303] "Creating device plugin manager"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.069201 4728 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.069234 4728 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.069437 4728 state_mem.go:36] "Initialized new in-memory state store"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.069528 4728 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.070813 4728 kubelet.go:418] "Attempting to sync node with API server"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.070842 4728 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.070868 4728 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.070884 4728 kubelet.go:324] "Adding apiserver pod source"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.070901 4728 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Dec 05 11:07:46 crc kubenswrapper[4728]: W1205 11:07:46.072322 4728 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.146:6443: connect: connection refused
Dec 05 11:07:46 crc kubenswrapper[4728]: E1205 11:07:46.072391 4728 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.146:6443: connect: connection refused" logger="UnhandledError"
Dec 05 11:07:46 crc kubenswrapper[4728]: W1205 11:07:46.072402 4728 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.146:6443: connect: connection refused
Dec 05 11:07:46 crc kubenswrapper[4728]: E1205 11:07:46.072475 4728 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.146:6443: connect: connection refused" logger="UnhandledError"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.072881 4728 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.073291 4728 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.074063 4728 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.074571 4728 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.074596 4728 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.074606 4728 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.074614 4728 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.074628 4728 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.074637 4728 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.074645 4728 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.074658 4728 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.074668 4728 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.074678 4728 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.074689 4728 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.074698 4728 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.075058 4728 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.075535 4728 server.go:1280] "Started kubelet"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.075760 4728 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.146:6443: connect: connection refused
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.075947 4728 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.075975 4728 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.076502 4728 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Dec 05 11:07:46 crc systemd[1]: Started Kubernetes Kubelet.
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.078462 4728 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.078518 4728 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.078543 4728 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-26 07:11:04.892651103 +0000 UTC
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.078573 4728 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 500h3m18.814079786s for next certificate rotation
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.078522 4728 server.go:460] "Adding debug handlers to kubelet server"
Dec 05 11:07:46 crc kubenswrapper[4728]: E1205 11:07:46.078937 4728 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.080879 4728 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.081408 4728 factory.go:55] Registering systemd factory
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.081429 4728 factory.go:221] Registration of the systemd container factory successfully
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.081932 4728 volume_manager.go:287] "The desired_state_of_world populator starts"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.082393 4728 volume_manager.go:289] "Starting Kubelet Volume Manager"
Dec 05 11:07:46 crc kubenswrapper[4728]: E1205 11:07:46.082009 4728 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.146:6443: connect: connection refused" interval="200ms"
Dec 05 11:07:46 crc kubenswrapper[4728]: E1205 11:07:46.081860 4728 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.146:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187e4d1736d7cdcb default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-05 11:07:46.075512267 +0000 UTC m=+0.217634960,LastTimestamp:2025-12-05 11:07:46.075512267 +0000 UTC m=+0.217634960,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Dec 05 11:07:46 crc kubenswrapper[4728]: W1205 11:07:46.081682 4728 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.146:6443: connect: connection refused
Dec 05 11:07:46 crc kubenswrapper[4728]: E1205 11:07:46.082730 4728 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.146:6443: connect: connection refused" logger="UnhandledError"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.082879 4728 factory.go:153] Registering CRI-O factory
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.082910 4728 factory.go:221] Registration of the crio container factory successfully
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.083000 4728 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.083042 4728 factory.go:103] Registering Raw factory
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.083069 4728 manager.go:1196] Started watching for new ooms in manager
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.084033 4728 manager.go:319] Starting recovery of all containers
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.094580 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.094638 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.094655 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.094668 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.094679 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.094691 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.094702 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.094714 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.094729 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.094741 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.094752 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.094765 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.094780 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.094813 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.094828 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.094842 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.094856 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.094871 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.094885 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.094897 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.094911 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.094979 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.094992 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095007 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095021 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095035 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095084 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095102 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095116 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095128 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095141 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095153 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095168 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095180 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095217 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095232 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095245 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095260 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095272 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095284 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095299 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095311 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095324 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095337 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095350 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095363 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095375 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095388 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095401 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095417 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095433 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095445 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095465 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095480 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095494 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095509 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095532 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095546 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095559 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095572 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095584 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095595 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095607 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095617 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095630 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095641 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095654 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095687 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095702 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095717 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095731 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095744 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095761 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095774 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095787 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095819 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095832 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095845 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095860 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095874 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095888 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095901 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095922 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095935 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095949 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095963 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095976 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.095988 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096004 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096016 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096030 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096043 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096056 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096069 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096084 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096096 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096108 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096122 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096135 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096147 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096160 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096175 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096188 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096201 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096226 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096240 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096256 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096272 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096286 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096300 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096315 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096329 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096344 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096358 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096378 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096392 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096405 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096417 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096429 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096442 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096457 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096470 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096483 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096495 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096507 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096521 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096535 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096548 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096562 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096575 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096588 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096602 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096614 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096627 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096639 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096656 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096669 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096681 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096693 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096706 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext=""
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096717 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b"
volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096731 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096744 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096758 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096771 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096785 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096879 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096895 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096908 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096920 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096933 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096945 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" 
volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096960 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096973 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096985 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.096998 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.097012 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.097025 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.097040 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.097053 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.097066 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.097079 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.097095 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" 
volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.097107 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.097153 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.097168 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.097183 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.097199 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.097213 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.097228 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.097240 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.097253 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.097266 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.097279 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" 
seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.097293 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.097941 4728 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.097978 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.097992 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.098005 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.098017 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.098029 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.098042 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.098054 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.098067 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.098083 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.098096 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.098109 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.098127 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.098139 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.098150 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.098164 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.098176 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.098190 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.098202 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.098217 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.098233 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.098247 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.098261 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.098274 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.098289 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.098306 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.098319 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.098333 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.098349 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.098361 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.098374 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.098388 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.098402 4728 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.098414 4728 reconstruct.go:97] "Volume reconstruction finished" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.098422 4728 reconciler.go:26] "Reconciler: start to sync state" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.106136 4728 manager.go:324] Recovery completed Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.114095 4728 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.115661 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.115691 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.115699 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.118479 4728 cpu_manager.go:225] "Starting CPU manager" policy="none" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.118493 4728 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.118510 4728 state_mem.go:36] "Initialized new in-memory state store" Dec 05 11:07:46 crc kubenswrapper[4728]: E1205 11:07:46.179334 4728 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 05 11:07:46 crc kubenswrapper[4728]: E1205 11:07:46.280060 4728 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 05 11:07:46 crc kubenswrapper[4728]: E1205 11:07:46.284603 4728 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.146:6443: connect: connection refused" interval="400ms" Dec 05 11:07:46 crc kubenswrapper[4728]: E1205 11:07:46.285224 4728 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.146:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187e4d1736d7cdcb default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-05 11:07:46.075512267 +0000 UTC m=+0.217634960,LastTimestamp:2025-12-05 11:07:46.075512267 +0000 UTC m=+0.217634960,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.347600 4728 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv4" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.350649 4728 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv6" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.350701 4728 status_manager.go:217] "Starting to sync pod status with apiserver" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.350724 4728 kubelet.go:2335] "Starting kubelet main sync loop" Dec 05 11:07:46 crc kubenswrapper[4728]: E1205 11:07:46.350773 4728 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Dec 05 11:07:46 crc kubenswrapper[4728]: W1205 11:07:46.351956 4728 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.146:6443: connect: connection refused Dec 05 11:07:46 crc kubenswrapper[4728]: E1205 11:07:46.352036 4728 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.146:6443: connect: connection refused" logger="UnhandledError" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.376203 4728 policy_none.go:49] "None policy: Start" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.377288 4728 memory_manager.go:170] "Starting memorymanager" policy="None" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.377321 4728 state_mem.go:35] "Initializing new in-memory state store" Dec 05 11:07:46 crc kubenswrapper[4728]: E1205 11:07:46.380470 4728 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 05 11:07:46 crc kubenswrapper[4728]: E1205 11:07:46.450897 4728 kubelet.go:2359] "Skipping pod synchronization" err="container runtime status check may not have completed yet" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.453706 4728 manager.go:334] "Starting Device Plugin manager" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.453862 4728 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.453883 4728 server.go:79] "Starting device plugin registration server" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.454603 4728 eviction_manager.go:189] "Eviction manager: starting control loop" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.454630 4728 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.455106 4728 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.455209 4728 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.455218 4728 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Dec 05 11:07:46 crc kubenswrapper[4728]: E1205 11:07:46.465145 4728 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.554864 4728 
kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.557062 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.557114 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.557125 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.557160 4728 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 11:07:46 crc kubenswrapper[4728]: E1205 11:07:46.557859 4728 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.146:6443: connect: connection refused" node="crc" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.651088 4728 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc"] Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.651284 4728 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.652820 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.652868 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.652882 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.653015 4728 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.653347 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.653434 4728 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.653960 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.654014 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.654028 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.654251 4728 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.654322 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.654350 4728 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.654911 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.654988 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.655007 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.655324 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.655355 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.655387 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.655405 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.655416 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.655440 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.655551 4728 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.655651 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.655685 4728 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.656217 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.656263 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.656275 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.656532 4728 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.656660 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.656692 4728 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.656840 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.656882 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.656897 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.657455 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.657484 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.657494 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.657740 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.657775 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.657783 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.658514 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.658551 4728 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.659373 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.659413 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.659423 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:07:46 crc kubenswrapper[4728]: E1205 11:07:46.685952 4728 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.146:6443: connect: connection refused" interval="800ms" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.705298 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.705373 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.705399 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.705420 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.705439 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.705459 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.705474 4728 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.705543 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.705599 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.705648 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.705681 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.705698 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.705716 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.705755 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.705854 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.758307 4728 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.759780 4728 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.759846 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.759858 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.759888 4728 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 11:07:46 crc kubenswrapper[4728]: E1205 11:07:46.760573 4728 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.146:6443: connect: connection refused" node="crc" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.807600 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.807671 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.807708 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.807739 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.807774 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.807838 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.807849 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 
11:07:46.807875 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.807868 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.807932 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.807872 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.808051 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.808073 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.808088 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.808102 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.807925 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.808155 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.808140 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.807950 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.808183 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.807948 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.808118 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.808216 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.808270 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.808280 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.808309 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.808341 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.808375 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.808377 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.808402 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 05 11:07:46 crc kubenswrapper[4728]: W1205 11:07:46.960393 4728 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.146:6443: connect: connection refused
Dec 05 11:07:46 crc kubenswrapper[4728]: E1205 11:07:46.960517 4728 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.146:6443: connect: connection refused" logger="UnhandledError"
Dec 05 11:07:46 crc kubenswrapper[4728]: I1205 11:07:46.982500 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 05 11:07:47 crc kubenswrapper[4728]: I1205 11:07:46.999978 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 05 11:07:47 crc kubenswrapper[4728]: W1205 11:07:47.015036 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-c97e43bfe717c0ccd1879d26b4650e84d3d31f902a34fb27f3327e7eb991da24 WatchSource:0}: Error finding container c97e43bfe717c0ccd1879d26b4650e84d3d31f902a34fb27f3327e7eb991da24: Status 404 returned error can't find the container with id c97e43bfe717c0ccd1879d26b4650e84d3d31f902a34fb27f3327e7eb991da24
Dec 05 11:07:47 crc kubenswrapper[4728]: I1205 11:07:47.022455 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Dec 05 11:07:47 crc kubenswrapper[4728]: W1205 11:07:47.038538 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-efa4be7cffe3936484c5265ac5bb73450d303e0029ad34db0fa9f35d69017c7f WatchSource:0}: Error finding container efa4be7cffe3936484c5265ac5bb73450d303e0029ad34db0fa9f35d69017c7f: Status 404 returned error can't find the container with id efa4be7cffe3936484c5265ac5bb73450d303e0029ad34db0fa9f35d69017c7f
Dec 05 11:07:47 crc kubenswrapper[4728]: I1205 11:07:47.052604 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc"
Dec 05 11:07:47 crc kubenswrapper[4728]: I1205 11:07:47.059622 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 11:07:47 crc kubenswrapper[4728]: W1205 11:07:47.067682 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-5b11e1b627b2d1f0a708d77bad951a0be98d878380434c66755292b057dd051d WatchSource:0}: Error finding container 5b11e1b627b2d1f0a708d77bad951a0be98d878380434c66755292b057dd051d: Status 404 returned error can't find the container with id 5b11e1b627b2d1f0a708d77bad951a0be98d878380434c66755292b057dd051d
Dec 05 11:07:47 crc kubenswrapper[4728]: W1205 11:07:47.073723 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-a570f452f0484f2b669dab16cbf9e5d72615fc35d0087c136e4e47b674cc239d WatchSource:0}: Error finding container a570f452f0484f2b669dab16cbf9e5d72615fc35d0087c136e4e47b674cc239d: Status 404 returned error can't find the container with id a570f452f0484f2b669dab16cbf9e5d72615fc35d0087c136e4e47b674cc239d
Dec 05 11:07:47 crc kubenswrapper[4728]: I1205 11:07:47.077350 4728 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.146:6443: connect: connection refused
Dec 05 11:07:47 crc kubenswrapper[4728]: I1205 11:07:47.160952 4728 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 11:07:47 crc kubenswrapper[4728]: I1205 11:07:47.162402 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:07:47 crc kubenswrapper[4728]: I1205 11:07:47.162444 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:07:47 crc kubenswrapper[4728]: I1205 11:07:47.162456 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:07:47 crc kubenswrapper[4728]: I1205 11:07:47.162481 4728 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Dec 05 11:07:47 crc kubenswrapper[4728]: E1205 11:07:47.162924 4728 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.146:6443: connect: connection refused" node="crc"
Dec 05 11:07:47 crc kubenswrapper[4728]: W1205 11:07:47.284088 4728 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.146:6443: connect: connection refused
Dec 05 11:07:47 crc kubenswrapper[4728]: E1205 11:07:47.284202 4728 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.146:6443: connect: connection refused" logger="UnhandledError"
Dec 05 11:07:47 crc kubenswrapper[4728]: I1205 11:07:47.355582 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"a570f452f0484f2b669dab16cbf9e5d72615fc35d0087c136e4e47b674cc239d"}
Dec 05 11:07:47 crc kubenswrapper[4728]: I1205 11:07:47.356908 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"5b11e1b627b2d1f0a708d77bad951a0be98d878380434c66755292b057dd051d"}
Dec 05 11:07:47 crc kubenswrapper[4728]: I1205 11:07:47.358243 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"efa4be7cffe3936484c5265ac5bb73450d303e0029ad34db0fa9f35d69017c7f"}
Dec 05 11:07:47 crc kubenswrapper[4728]: I1205 11:07:47.360173 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"c61e3ed16aec7a34632cfe98035feb17defcfb94c288fe032c5f05d29b9039a0"}
Dec 05 11:07:47 crc kubenswrapper[4728]: I1205 11:07:47.361342 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"c97e43bfe717c0ccd1879d26b4650e84d3d31f902a34fb27f3327e7eb991da24"}
Dec 05 11:07:47 crc kubenswrapper[4728]: W1205 11:07:47.399658 4728 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.146:6443: connect: connection refused
Dec 05 11:07:47 crc kubenswrapper[4728]: E1205 11:07:47.399737 4728 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.146:6443: connect: connection refused" logger="UnhandledError"
Dec 05 11:07:47 crc kubenswrapper[4728]: W1205 11:07:47.450338 4728 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.146:6443: connect: connection refused
Dec 05 11:07:47 crc kubenswrapper[4728]: E1205 11:07:47.450406 4728 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.146:6443: connect: connection refused" logger="UnhandledError"
Dec 05 11:07:47 crc kubenswrapper[4728]: E1205 11:07:47.486761 4728 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.146:6443: connect: connection refused" interval="1.6s"
Dec 05 11:07:47 crc kubenswrapper[4728]: I1205 11:07:47.965878 4728 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 11:07:47 crc kubenswrapper[4728]: I1205 11:07:47.967717 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:07:47 crc kubenswrapper[4728]: I1205 11:07:47.967758 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:07:47 crc kubenswrapper[4728]: I1205 11:07:47.967769 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:07:47 crc kubenswrapper[4728]: I1205 11:07:47.967813 4728 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Dec 05 11:07:47 crc kubenswrapper[4728]: E1205 11:07:47.968337 4728 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.146:6443: connect: connection refused" node="crc"
Dec 05 11:07:48 crc kubenswrapper[4728]: I1205 11:08:48.077143 4728 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.146:6443: connect: connection refused
Dec 05 11:07:48 crc kubenswrapper[4728]: I1205 11:07:48.117651 4728 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates
Dec 05 11:07:48 crc kubenswrapper[4728]: E1205 11:07:48.119000 4728 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.146:6443: connect: connection refused" logger="UnhandledError"
Dec 05 11:07:48 crc kubenswrapper[4728]: I1205 11:07:48.365431 4728 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="20c7713b547195d59fc70a00f6f6de511936ce6f38c6a4bcf89db4880ceebae6" exitCode=0
Dec 05 11:07:48 crc kubenswrapper[4728]: I1205 11:07:48.365528 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"20c7713b547195d59fc70a00f6f6de511936ce6f38c6a4bcf89db4880ceebae6"}
Dec 05 11:07:48 crc kubenswrapper[4728]: I1205 11:07:48.365585 4728 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 11:07:48 crc kubenswrapper[4728]: I1205 11:07:48.366961 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:07:48 crc kubenswrapper[4728]: I1205 11:07:48.366995 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:07:48 crc kubenswrapper[4728]: I1205 11:07:48.367006 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:07:48 crc kubenswrapper[4728]: I1205 11:07:48.368265 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"1665078c543b335af45e2ef37840dff719d513125a747f8c07dc74ef97e07354"}
Dec 05 11:07:48 crc kubenswrapper[4728]: I1205 11:07:48.368289 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"0ccc55c28cdfadaa191738047a8d7223cf102d79a9af4d225562a87c42cf1c8a"}
Dec 05 11:07:48 crc kubenswrapper[4728]: I1205 11:07:48.368299 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"fe411cb87a46bdaf8208ebd4162ff18f2f9a2617ca43eb635793364573eb3ea4"}
Dec 05 11:07:48 crc kubenswrapper[4728]: I1205 11:07:48.368309 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"e8ca6321022609b3cd60a9e1a534cfd3e69d3f520e5252ed4ffa8f76be4af91e"}
Dec 05 11:07:48 crc kubenswrapper[4728]: I1205 11:07:48.368372 4728 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 11:07:48 crc kubenswrapper[4728]: I1205 11:07:48.369408 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:07:48 crc kubenswrapper[4728]: I1205 11:07:48.369445 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:07:48 crc kubenswrapper[4728]: I1205 11:07:48.369456 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:07:48 crc kubenswrapper[4728]: I1205 11:07:48.369451 4728 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159" exitCode=0
Dec 05 11:07:48 crc kubenswrapper[4728]: I1205 11:07:48.369632 4728 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 11:07:48 crc kubenswrapper[4728]: I1205 11:07:48.369483 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159"}
Dec 05 11:07:48 crc kubenswrapper[4728]: I1205 11:07:48.370612 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:07:48 crc kubenswrapper[4728]: I1205 11:07:48.370650 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:07:48 crc kubenswrapper[4728]: I1205 11:07:48.370650 4728 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538" exitCode=0
Dec 05 11:07:48 crc kubenswrapper[4728]: I1205 11:07:48.370670 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:07:48 crc kubenswrapper[4728]: I1205 11:07:48.370698 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538"}
Dec 05 11:07:48 crc kubenswrapper[4728]: I1205 11:07:48.370753 4728 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 11:07:48 crc kubenswrapper[4728]: I1205 11:07:48.371402 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:07:48 crc kubenswrapper[4728]: I1205 11:07:48.371419 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:07:48 crc kubenswrapper[4728]: I1205 11:07:48.371427 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:07:48 crc kubenswrapper[4728]: I1205 11:07:48.371785 4728 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="b5d44790495a71b1a699a16a635f5722e5a9ab589f6ad8fd46f6ffc536b56815" exitCode=0
Dec 05 11:07:48 crc kubenswrapper[4728]: I1205 11:07:48.371825 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"b5d44790495a71b1a699a16a635f5722e5a9ab589f6ad8fd46f6ffc536b56815"}
Dec 05 11:07:48 crc kubenswrapper[4728]: I1205 11:07:48.371861 4728 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 11:07:48 crc kubenswrapper[4728]: I1205 11:07:48.372752 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:07:48 crc kubenswrapper[4728]: I1205 11:07:48.372843 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:07:48 crc kubenswrapper[4728]: I1205 11:07:48.372862 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:07:48 crc kubenswrapper[4728]: I1205 11:07:48.372848 4728 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 11:07:48 crc kubenswrapper[4728]: I1205 11:07:48.373414 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:07:48 crc kubenswrapper[4728]: I1205 11:07:48.373434 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:07:48 crc kubenswrapper[4728]: I1205 11:07:48.373442 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:07:49 crc kubenswrapper[4728]: I1205 11:07:49.077742 4728 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.146:6443: connect: connection refused
Dec 05 11:07:49 crc kubenswrapper[4728]: E1205 11:07:49.088166 4728 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.146:6443: connect: connection refused" interval="3.2s"
Dec 05 11:07:49 crc kubenswrapper[4728]: I1205 11:07:49.378890 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"9b05c55e24f53fc91a2fd2b2a34b000751799933c3cb56da1ea666a6bfb6ba13"}
Dec 05 11:07:49 crc kubenswrapper[4728]: I1205 11:07:49.378963 4728 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 11:07:49 crc kubenswrapper[4728]: I1205 11:07:49.378966 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"af8363ba45c19a07e7d11064c2e321ec28b939288d01eb82d8bd9e1ee6b93998"}
Dec 05 11:07:49 crc kubenswrapper[4728]: I1205 11:07:49.379039 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"d8ab41915fb3de0296bc2ba49fe39835620c1ed216790add23ccc2c23ac718c2"}
Dec 05 11:07:49 crc kubenswrapper[4728]: I1205 11:07:49.379049 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"920da764fdc49312fdf906f31a1280028d5762b4b7af2b3806c9488e3dfa9d71"}
Dec 05 11:07:49 crc kubenswrapper[4728]: I1205 11:07:49.379058 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"adb81cae56872a55c5781abe457653330ce2284251d64366ece5f2f05055b509"}
Dec 05 11:07:49 crc kubenswrapper[4728]: I1205 11:07:49.380033 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:07:49 crc kubenswrapper[4728]: I1205 11:07:49.380079 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:07:49 crc kubenswrapper[4728]: I1205 11:07:49.380091 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:07:49 crc kubenswrapper[4728]: I1205 11:07:49.381838 4728 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6" exitCode=0
Dec 05 11:07:49 crc kubenswrapper[4728]: I1205 11:07:49.381916 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6"}
Dec 05 11:07:49 crc kubenswrapper[4728]: I1205 11:07:49.381947 4728 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 11:07:49 crc kubenswrapper[4728]: I1205 11:07:49.382876 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:07:49 crc kubenswrapper[4728]: I1205 11:07:49.382906 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:07:49 crc kubenswrapper[4728]: I1205 11:07:49.382915 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:07:49 crc kubenswrapper[4728]: I1205 11:07:49.383917 4728 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 11:07:49 crc kubenswrapper[4728]: I1205 11:07:49.384330 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"f927842bae27f6ef715fcbfdbbb0dbb79e59e4706e28bffd6331140f8a66d7f7"}
Dec 05 11:07:49 crc kubenswrapper[4728]: I1205 11:07:49.384780 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:07:49 crc kubenswrapper[4728]: I1205 11:07:49.384833 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:07:49 crc kubenswrapper[4728]: I1205 11:07:49.384842 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:07:49 crc kubenswrapper[4728]: I1205 11:07:49.388306 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"1f74f65892b7a9db35105f91763d8b150e62b86dae595e7d6208d47355d51bed"}
Dec 05 11:07:49 crc kubenswrapper[4728]: I1205 11:07:49.388345 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"0c4913667ba97a55961e836efe1c65c28881ae9f356084d3c712342aa85b9301"}
Dec 05 11:07:49 crc kubenswrapper[4728]: I1205 11:07:49.388351 4728 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 11:07:49 crc kubenswrapper[4728]: I1205 11:07:49.388418 4728 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 11:07:49 crc kubenswrapper[4728]: I1205 11:07:49.388359 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"b3383601ba7eb5f9957d287898dee5f9fec9cfef59b609b1e525a0e0564d7e86"}
Dec 05 11:07:49 crc kubenswrapper[4728]: I1205 11:07:49.389620 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:07:49 crc kubenswrapper[4728]: I1205 11:07:49.389675 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:07:49 crc kubenswrapper[4728]: I1205 11:07:49.389687 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:07:49 crc kubenswrapper[4728]: I1205 11:07:49.389635 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:07:49 crc kubenswrapper[4728]: I1205 11:07:49.389784 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:07:49 crc kubenswrapper[4728]: I1205 11:07:49.389811 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:07:49 crc kubenswrapper[4728]: I1205 11:07:49.568705 4728 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 11:07:49 crc kubenswrapper[4728]: I1205 11:07:49.570429 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:07:49 crc kubenswrapper[4728]: I1205 11:07:49.570473 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:07:49 crc kubenswrapper[4728]: I1205 11:07:49.570485 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:07:49 crc kubenswrapper[4728]: I1205 11:07:49.570518 4728 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Dec 05 11:07:50 crc kubenswrapper[4728]: I1205 11:07:50.397347 4728 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164" exitCode=0
Dec 05 11:07:50 crc kubenswrapper[4728]: I1205 11:07:50.397487 4728 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 11:07:50 crc kubenswrapper[4728]: I1205 11:07:50.397559 4728 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 11:07:50 crc kubenswrapper[4728]: I1205 11:07:50.397632 4728 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 11:07:50 crc kubenswrapper[4728]: I1205 11:07:50.398109 4728 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Dec 05 11:07:50 crc kubenswrapper[4728]: I1205 11:07:50.398143 4728 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 11:07:50 crc kubenswrapper[4728]: I1205 11:07:50.398114 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164"}
Dec 05 11:07:50 crc kubenswrapper[4728]: I1205 11:07:50.398287 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 11:07:50 crc kubenswrapper[4728]: I1205 11:07:50.398654 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:07:50 crc kubenswrapper[4728]: I1205 11:07:50.398705 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:07:50 crc kubenswrapper[4728]: I1205 11:07:50.398725 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:07:50 crc kubenswrapper[4728]: I1205 11:07:50.399330 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:07:50 crc kubenswrapper[4728]: I1205 11:07:50.399449 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:07:50 crc kubenswrapper[4728]: I1205 11:07:50.399566 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:07:50 crc kubenswrapper[4728]: I1205 11:07:50.399520 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:07:50 crc kubenswrapper[4728]: I1205 11:07:50.399719 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:07:50 crc kubenswrapper[4728]: I1205 11:07:50.399733 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:07:50 crc kubenswrapper[4728]: I1205 11:07:50.399470 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:07:50 crc kubenswrapper[4728]: I1205 11:07:50.399765 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:07:50 crc kubenswrapper[4728]: I1205 11:07:50.399775 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:07:50 crc kubenswrapper[4728]: I1205 11:07:50.617248 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 05 11:07:51 crc kubenswrapper[4728]: I1205 11:07:51.268280 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 05 11:07:51 crc kubenswrapper[4728]: I1205 11:07:51.268529 4728 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 11:07:51 crc kubenswrapper[4728]: I1205 11:07:51.269928 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:07:51 crc kubenswrapper[4728]: I1205 11:07:51.269993 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:07:51 crc kubenswrapper[4728]: I1205 11:07:51.270003 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:07:51 crc kubenswrapper[4728]: I1205 11:07:51.275707 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 05 11:07:51 crc kubenswrapper[4728]: I1205 11:07:51.319772 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 11:07:51 crc kubenswrapper[4728]: I1205 11:07:51.405522 4728 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 11:07:51 crc kubenswrapper[4728]: I1205 11:07:51.405514 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"8e0e4eacda19e748d0d3f8e1d76b38c186c2df0f81a171387de0d9016a76c719"}
Dec 05 11:07:51 crc kubenswrapper[4728]: I1205 11:07:51.405774 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 05 11:07:51 crc kubenswrapper[4728]: I1205 11:07:51.405834 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"b4e303bada68d0a42afcecbe481dd6199dd2e545598d65614cee8ce1097fa3da"}
Dec 05 11:07:51 crc kubenswrapper[4728]: I1205 11:07:51.405857 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"489eff9110c6e48b0e3119edf09adf0a2a48fc1fdc1bb8e81f113ab882725eee"}
Dec 05 11:07:51 crc kubenswrapper[4728]: I1205 11:07:51.405546 4728 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 11:07:51 crc kubenswrapper[4728]: I1205 11:07:51.406454 4728 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 11:07:51 crc kubenswrapper[4728]: I1205 11:07:51.407046 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:07:51 crc kubenswrapper[4728]: I1205 11:07:51.407111 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:07:51 crc kubenswrapper[4728]: I1205 11:07:51.407137 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:07:51 crc kubenswrapper[4728]: I1205 11:07:51.407806 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:07:51 crc kubenswrapper[4728]: I1205 11:07:51.407839 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:07:51 crc kubenswrapper[4728]: I1205 11:07:51.407851 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:07:51 crc kubenswrapper[4728]: I1205 11:07:51.407907 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:07:51 crc kubenswrapper[4728]: I1205 11:07:51.407970 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:07:51 crc kubenswrapper[4728]: I1205 11:07:51.407999 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:07:52 crc kubenswrapper[4728]: I1205 11:07:52.386300 4728 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates
Dec 05 11:07:52 crc kubenswrapper[4728]: I1205 11:07:52.414443 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"7c60e02416d658dfb8deecf915952febb04b650126edae08c8bf522c50524c13"}
Dec 05 11:07:52 crc kubenswrapper[4728]: I1205 11:07:52.414502 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"82c6351e5a4a9441b43ba3145aa818599799ad7fcdd6bf62b5d0ec08eb3a9d5c"}
Dec 05 11:07:52 crc kubenswrapper[4728]: I1205 11:07:52.414510 4728 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 11:07:52 crc kubenswrapper[4728]: I1205 11:07:52.414602 4728 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 11:07:52 crc kubenswrapper[4728]: I1205 11:07:52.414622 4728 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 11:07:52 crc kubenswrapper[4728]: I1205 11:07:52.416127 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:07:52 crc kubenswrapper[4728]: I1205 11:07:52.416175 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:07:52 crc kubenswrapper[4728]: I1205 11:07:52.416189 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:07:52 crc kubenswrapper[4728]: I1205 11:07:52.416214 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:07:52 crc kubenswrapper[4728]: I1205 11:07:52.416224 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:07:52 crc kubenswrapper[4728]: I1205 11:07:52.416196 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:07:52 crc kubenswrapper[4728]: I1205 11:07:52.416979 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:07:52 crc kubenswrapper[4728]: I1205 11:07:52.417060 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:07:52 crc kubenswrapper[4728]: I1205 11:07:52.417098 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:07:52 crc kubenswrapper[4728]: I1205 11:07:52.417695 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 05 11:07:52 crc kubenswrapper[4728]: I1205 11:07:52.450444 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc"
Dec 05 11:07:52 crc kubenswrapper[4728]: I1205 11:07:52.700568 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 05 11:07:53 crc kubenswrapper[4728]: I1205 11:07:53.417885 4728 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 11:07:53 crc kubenswrapper[4728]: I1205 11:07:53.417935 4728 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 11:07:53 crc kubenswrapper[4728]: I1205 11:07:53.419548 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:07:53 crc kubenswrapper[4728]: I1205 11:07:53.419604 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:07:53 crc kubenswrapper[4728]: I1205 11:07:53.419622 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:07:53 crc kubenswrapper[4728]: I1205 11:07:53.420440 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:07:53 crc kubenswrapper[4728]: I1205 11:07:53.420498 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:07:53 crc kubenswrapper[4728]: I1205 11:07:53.420520 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:07:53 crc kubenswrapper[4728]: I1205 11:07:53.860613 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 11:07:53 crc kubenswrapper[4728]: I1205 11:07:53.861030 4728 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 11:07:53 crc kubenswrapper[4728]: I1205 11:07:53.862495 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:07:53 crc kubenswrapper[4728]: I1205 11:07:53.862532 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:07:53 crc kubenswrapper[4728]: I1205 11:07:53.862540 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:07:54 crc kubenswrapper[4728]: I1205 11:07:54.421611 4728 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 11:07:54 crc kubenswrapper[4728]: I1205 11:07:54.421611 4728 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 11:07:54 crc kubenswrapper[4728]: I1205 11:07:54.423304 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:07:54 crc kubenswrapper[4728]: I1205 11:07:54.423363 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:07:54 crc kubenswrapper[4728]: I1205 11:07:54.423381 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:07:54 crc kubenswrapper[4728]: I1205 11:07:54.424233 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:07:54 crc kubenswrapper[4728]: I1205 11:07:54.424314 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:07:54 crc kubenswrapper[4728]: I1205 11:07:54.424340 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:07:55 crc kubenswrapper[4728]: I1205 11:07:55.418511 4728 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body=
Dec 05 11:07:55 crc kubenswrapper[4728]: I1205 11:07:55.418608 4728 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Dec 05 11:07:56 crc kubenswrapper[4728]: E1205 11:07:56.465301 4728 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found"
Dec 05 11:07:57 crc kubenswrapper[4728]: I1205 11:07:57.073514 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc"
Dec 05 11:07:57 crc kubenswrapper[4728]: I1205 11:07:57.073836 4728 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 11:07:57 crc kubenswrapper[4728]: I1205 11:07:57.075492 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:07:57 crc kubenswrapper[4728]: I1205 11:07:57.075574 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:07:57 crc kubenswrapper[4728]: I1205 11:07:57.075588 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:07:58 crc kubenswrapper[4728]: I1205 11:07:58.043539 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 05 11:07:58 crc kubenswrapper[4728]: I1205 11:07:58.043675 4728 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 11:07:58 crc kubenswrapper[4728]: I1205 11:07:58.045032 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:07:58 crc kubenswrapper[4728]: I1205 11:07:58.045072 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:07:58 crc kubenswrapper[4728]: I1205 11:07:58.045086 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:07:59 crc kubenswrapper[4728]: E1205 11:07:59.571960 4728 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": net/http: TLS handshake timeout" node="crc"
Dec 05 11:07:59 crc kubenswrapper[4728]: W1205 11:07:59.650858 4728 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout
Dec 05 11:07:59 crc kubenswrapper[4728]: I1205 11:07:59.650964 4728 trace.go:236] Trace[1033717352]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (05-Dec-2025 11:07:49.649) (total time: 10001ms):
Dec 05 11:07:59 crc kubenswrapper[4728]: Trace[1033717352]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (11:07:59.650)
Dec 05 11:07:59 crc kubenswrapper[4728]: Trace[1033717352]: [10.001673027s] [10.001673027s] END
Dec 05 11:07:59 crc kubenswrapper[4728]: E1205 11:07:59.650990 4728 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError"
Dec 05 11:07:59 crc kubenswrapper[4728]: W1205 11:07:59.696781 4728 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout
Dec 05 11:07:59 crc kubenswrapper[4728]: I1205 11:07:59.697116 4728 trace.go:236] Trace[1340489383]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (05-Dec-2025 11:07:49.695) (total time: 10002ms):
Dec 05 11:07:59 crc kubenswrapper[4728]: Trace[1340489383]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (11:07:59.696)
Dec 05 11:07:59 crc kubenswrapper[4728]: Trace[1340489383]: [10.002041447s] [10.002041447s] END
Dec 05 11:07:59 crc kubenswrapper[4728]: E1205 11:07:59.697292 4728 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError"
Dec 05 11:07:59 crc kubenswrapper[4728]: W1205 11:07:59.940178 4728 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout
Dec 05 11:07:59 crc kubenswrapper[4728]: I1205 11:07:59.940281 4728 trace.go:236] Trace[1676234975]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (05-Dec-2025 11:07:49.938) (total time: 10001ms):
Dec 05 11:07:59 crc kubenswrapper[4728]: Trace[1676234975]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (11:07:59.940)
Dec 05 11:07:59 crc kubenswrapper[4728]: Trace[1676234975]: [10.001428957s] [10.001428957s] END
Dec 05 11:07:59 crc kubenswrapper[4728]: E1205 11:07:59.940303 4728 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError"
Dec 05 11:08:00 crc kubenswrapper[4728]: I1205 11:08:00.077604 4728 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout
Dec 05 11:08:00 crc kubenswrapper[4728]: I1205 11:08:00.124876 4728 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403}
Dec 05 11:08:00 crc kubenswrapper[4728]: I1205 11:08:00.124960 4728 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Dec 05 11:08:00 crc kubenswrapper[4728]: I1205 11:08:00.133544 4728 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403}
Dec 05 11:08:00 crc kubenswrapper[4728]: I1205 11:08:00.133615 4728 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Dec 05 11:08:02 crc kubenswrapper[4728]: I1205 11:08:02.772766 4728 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 11:08:02 crc kubenswrapper[4728]: I1205 11:08:02.774183 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:02 crc kubenswrapper[4728]: I1205 11:08:02.774246 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:02 crc kubenswrapper[4728]: I1205 11:08:02.774264 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:02 crc kubenswrapper[4728]: I1205 11:08:02.774304 4728 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Dec 05 11:08:02 crc kubenswrapper[4728]: E1205 11:08:02.778197 4728 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc"
Dec 05 11:08:03 crc kubenswrapper[4728]: I1205 11:08:03.866628 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 11:08:03 crc kubenswrapper[4728]: I1205 11:08:03.866853 4728 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 11:08:03 crc kubenswrapper[4728]: I1205 11:08:03.868360 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:03 crc kubenswrapper[4728]: I1205 11:08:03.868407 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:03 crc kubenswrapper[4728]: I1205 11:08:03.868418 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:03 crc kubenswrapper[4728]: I1205 11:08:03.872341 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 11:08:04 crc kubenswrapper[4728]: I1205 11:08:04.446290 4728 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 11:08:04 crc kubenswrapper[4728]: I1205 11:08:04.447064 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:04 crc kubenswrapper[4728]: I1205 11:08:04.447097 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:04 crc kubenswrapper[4728]: I1205 11:08:04.447108 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:05 crc kubenswrapper[4728]: I1205 11:08:05.024508 4728 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160
Dec 05 11:08:05 crc kubenswrapper[4728]: E1205 11:08:05.104925 4728 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s"
Dec 05 11:08:05 crc kubenswrapper[4728]: I1205 11:08:05.107532 4728 reconstruct.go:205] "DevicePaths of reconstructed volumes updated"
Dec 05 11:08:05 crc kubenswrapper[4728]: I1205 11:08:05.111925 4728 trace.go:236] Trace[939498835]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (05-Dec-2025 11:07:50.120) (total time: 14990ms):
Dec 05 11:08:05 crc kubenswrapper[4728]: Trace[939498835]: ---"Objects listed" error: 14990ms (11:08:05.111)
Dec 05 11:08:05 crc kubenswrapper[4728]: Trace[939498835]: [14.990960277s] [14.990960277s] END
Dec 05 11:08:05 crc kubenswrapper[4728]: I1205 11:08:05.111951 4728 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Dec 05 11:08:05 crc kubenswrapper[4728]: I1205 11:08:05.112261 4728 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146
Dec 05 11:08:05 crc kubenswrapper[4728]: I1205 11:08:05.223044 4728 csr.go:261] certificate signing request csr-ncstc is approved, waiting to be issued
Dec 05 11:08:05 crc kubenswrapper[4728]: I1205 11:08:05.258982 4728 csr.go:257] certificate signing request csr-ncstc is issued
Dec 05 11:08:05 crc kubenswrapper[4728]: I1205 11:08:05.335397 4728 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:34100->192.168.126.11:17697: read: connection reset by peer" start-of-body=
Dec 05 11:08:05 crc kubenswrapper[4728]: I1205 11:08:05.335478 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:34100->192.168.126.11:17697: read: connection reset by peer"
Dec 05 11:08:05 crc kubenswrapper[4728]: I1205 11:08:05.335399 4728 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:34090->192.168.126.11:17697: read: connection reset by peer" start-of-body=
Dec 05 11:08:05 crc kubenswrapper[4728]: I1205 11:08:05.335586 4728 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:34090->192.168.126.11:17697: read: connection reset by peer"
Dec 05 11:08:05 crc kubenswrapper[4728]: I1205 11:08:05.335961 4728 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body=
Dec 05 11:08:05 crc kubenswrapper[4728]: I1205 11:08:05.336020 4728 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused"
Dec 05 11:08:05 crc kubenswrapper[4728]: I1205 11:08:05.419147 4728 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Dec 05 11:08:05 crc kubenswrapper[4728]: I1205 11:08:05.419226 4728 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Dec 05 11:08:05 crc kubenswrapper[4728]: I1205 11:08:05.449858 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log"
Dec 05 11:08:05 crc kubenswrapper[4728]: I1205 11:08:05.451400 4728 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="9b05c55e24f53fc91a2fd2b2a34b000751799933c3cb56da1ea666a6bfb6ba13" exitCode=255
Dec 05 11:08:05 crc kubenswrapper[4728]: I1205 11:08:05.451440 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"9b05c55e24f53fc91a2fd2b2a34b000751799933c3cb56da1ea666a6bfb6ba13"}
Dec 05 11:08:05 crc kubenswrapper[4728]: I1205 11:08:05.451874 4728 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 11:08:05 crc kubenswrapper[4728]: I1205 11:08:05.454897 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:05 crc kubenswrapper[4728]: I1205 11:08:05.454962 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:05 crc kubenswrapper[4728]: I1205 11:08:05.454981 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:05 crc kubenswrapper[4728]: I1205 11:08:05.455920 4728 scope.go:117] "RemoveContainer" containerID="9b05c55e24f53fc91a2fd2b2a34b000751799933c3cb56da1ea666a6bfb6ba13"
Dec 05 11:08:05 crc kubenswrapper[4728]: I1205 11:08:05.645278 4728 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Dec 05 11:08:05 crc kubenswrapper[4728]: I1205 11:08:05.965212 4728 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Dec 05 11:08:05 crc kubenswrapper[4728]: I1205 11:08:05.992704 4728 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials"
Dec 05 11:08:05 crc kubenswrapper[4728]: E1205 11:08:05.992910 4728 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/events\": read tcp 38.102.83.146:41268->38.102.83.146:6443: use of closed network connection" event="&Event{ObjectMeta:{kube-rbac-proxy-crio-crc.187e4d17bfddfc4c openshift-machine-config-operator 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-machine-config-operator,Name:kube-rbac-proxy-crio-crc,UID:d1b160f5dda77d281dd8e69ec8d817f9,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-rbac-proxy-crio},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-05 11:07:48.37439598 +0000 UTC m=+2.516518673,LastTimestamp:2025-12-05 11:07:48.37439598 +0000 UTC m=+2.516518673,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Dec 05 11:08:05 crc kubenswrapper[4728]: W1205 11:08:05.993011 4728 reflector.go:484] k8s.io/client-go/informers/factory.go:160: watch of *v1.RuntimeClass ended with: very short watch: k8s.io/client-go/informers/factory.go:160: Unexpected watch close - watch lasted less than a second and no items received
Dec 05 11:08:05 crc kubenswrapper[4728]: W1205 11:08:05.993032 4728 reflector.go:484] k8s.io/client-go/informers/factory.go:160: watch of *v1.CSIDriver ended with: very short watch: k8s.io/client-go/informers/factory.go:160: Unexpected watch close - watch lasted less than a second and no items received
Dec 05
11:08:05 crc kubenswrapper[4728]: W1205 11:08:05.993028 4728 reflector.go:484] k8s.io/client-go/informers/factory.go:160: watch of *v1.Node ended with: very short watch: k8s.io/client-go/informers/factory.go:160: Unexpected watch close - watch lasted less than a second and no items received Dec 05 11:08:05 crc kubenswrapper[4728]: W1205 11:08:05.993077 4728 reflector.go:484] k8s.io/client-go/informers/factory.go:160: watch of *v1.Service ended with: very short watch: k8s.io/client-go/informers/factory.go:160: Unexpected watch close - watch lasted less than a second and no items received Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.082242 4728 apiserver.go:52] "Watching apiserver" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.085374 4728 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.085597 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-dns/node-resolver-85f5z","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c"] Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.086090 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-85f5z" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.086325 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.086384 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.086455 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 11:08:06 crc kubenswrapper[4728]: E1205 11:08:06.086452 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.086474 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.086474 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.086583 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:08:06 crc kubenswrapper[4728]: E1205 11:08:06.086686 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 11:08:06 crc kubenswrapper[4728]: E1205 11:08:06.086869 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.088757 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.088765 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.089337 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.089372 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.089375 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.089456 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.089517 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.089890 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.090079 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.090133 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.090160 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.090447 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.109829 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-85f5z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f536e7a4-ad53-442e-b7c3-8928fcd89f22\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r72s7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-85f5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.124124 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.134986 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-85f5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f536e7a4-ad53-442e-b7c3-8928fcd89f22\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r72s7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-85f5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.148977 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.159574 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.166782 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.177611 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.181824 4728 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.189453 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.201118 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.214501 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.214561 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.214589 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.214611 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.214634 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.214657 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.214679 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.214704 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: 
\"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.214728 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.214749 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.214772 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.214816 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.214841 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.214882 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.214907 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.214931 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.214930 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.214954 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.214984 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.215031 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.215040 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.215059 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.215162 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.215195 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.215215 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.215220 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.215248 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.215280 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.215304 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.215327 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.215353 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.215411 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.215416 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.215458 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.215486 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.215501 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.215511 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.215534 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.215572 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.215610 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.215640 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.215673 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.215704 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.215735 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.215764 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.215818 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.215850 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.215878 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.215906 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.215933 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.215966 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.215995 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.216068 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.216098 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.216127 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.216154 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.216182 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.216212 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: 
\"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.216241 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.216271 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.216299 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.216329 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.216361 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.216392 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.216420 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.216447 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.216479 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.216509 4728 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.216538 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.216569 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.216600 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.216634 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.216662 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.216692 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.216723 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.216752 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.216782 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.216834 4728 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.216865 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.216898 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.216930 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.216963 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.216996 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.217029 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.217063 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.217370 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.217412 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.217443 4728 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.217473 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.217504 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.217536 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.217565 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.217594 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.217623 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.217654 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.217681 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.217740 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 
11:08:06.217769 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.217816 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.217845 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.217879 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.217911 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.217990 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.218031 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.218062 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.218105 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.218256 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.218300 4728 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.218334 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.218365 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.218398 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.218432 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.218466 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.218499 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.218531 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.218570 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.218602 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 
05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.218634 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.218666 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.218709 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.218739 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.218769 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.219114 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.219161 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.219193 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.219226 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.219258 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.219289 4728 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.219320 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.219354 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.219391 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.219432 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.219462 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.219493 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.219530 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.219569 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.219598 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.219636 4728 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.219667 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.219697 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.219733 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.215642 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.215690 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.215893 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.215919 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.216194 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). 
InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.216229 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.216339 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.216444 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.216570 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.216651 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.216672 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.216753 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.216772 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.219903 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.216878 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.216898 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.216955 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.216974 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.216982 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.217060 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.217871 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.218110 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.218142 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.218341 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.218848 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.218858 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.218880 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.218948 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.219073 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.219106 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.219126 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.219155 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.219301 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.219354 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.219364 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.219369 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.220035 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.219587 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.219698 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.219709 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.219745 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.219817 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.220094 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). 
InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.219895 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.220407 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.220723 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.220806 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.221035 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.221277 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.221677 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.221780 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.222122 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.222209 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.222847 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.222868 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.222930 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.223300 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.223326 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.223756 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.223912 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.224086 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.224304 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.224316 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.224561 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.224632 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.224814 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.224955 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.225114 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.225286 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.225290 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.225325 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.225513 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.225918 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.225918 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.225972 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.226122 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: E1205 11:08:06.226121 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:08:06.726045806 +0000 UTC m=+20.868168579 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.226252 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.226278 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.226460 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.226765 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.226868 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.227002 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.227136 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.227227 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.227252 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.227264 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.227292 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.227451 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.228516 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.228856 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.228868 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.228969 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.229553 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.230226 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.230551 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). 
InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.231813 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.232136 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.232679 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.232827 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.233069 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.233157 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.233455 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.233490 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.219776 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.233518 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.233564 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.233603 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.233634 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.233673 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.233731 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.233782 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.233904 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.233947 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.233980 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.233986 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.234010 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.234013 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.234060 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.234086 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.234109 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.234134 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.234166 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.234187 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.234209 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.234230 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.234254 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod 
\"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.234278 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.234300 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.234323 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.234347 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.234368 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.234390 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.234387 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.234411 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.234436 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.234460 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.234482 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.234503 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.234526 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.234559 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.234581 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.234601 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.234624 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") 
pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.234644 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.234665 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.234686 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.234706 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.234727 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.234753 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.234775 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.234786 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.234816 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.234881 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.235186 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.235326 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.235333 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.235512 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.235669 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.235850 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.236015 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.236160 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.236193 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.237052 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.237499 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.237576 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.237782 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.237857 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.237891 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.237916 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.237940 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.238153 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.238847 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.238899 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.238925 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.238952 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.238985 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.239009 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.239032 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.239055 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.239132 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.239157 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod 
\"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.239188 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.239216 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.239241 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.239292 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.239323 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.239347 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.239372 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.239403 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/f536e7a4-ad53-442e-b7c3-8928fcd89f22-hosts-file\") pod \"node-resolver-85f5z\" (UID: \"f536e7a4-ad53-442e-b7c3-8928fcd89f22\") " pod="openshift-dns/node-resolver-85f5z" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.239425 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r72s7\" (UniqueName: 
\"kubernetes.io/projected/f536e7a4-ad53-442e-b7c3-8928fcd89f22-kube-api-access-r72s7\") pod \"node-resolver-85f5z\" (UID: \"f536e7a4-ad53-442e-b7c3-8928fcd89f22\") " pod="openshift-dns/node-resolver-85f5z" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.239451 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.239477 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.239504 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.239527 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.239554 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.239579 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.239620 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.239645 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " 
pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.239669 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.239693 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.239782 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.239828 4728 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.239843 4728 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.239856 4728 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.239869 4728 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.239882 4728 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.239893 4728 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.239905 4728 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.239916 4728 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.239928 4728 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc 
kubenswrapper[4728]: I1205 11:08:06.239940 4728 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.239951 4728 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.239966 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.239980 4728 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.239991 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240003 4728 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240014 4728 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240027 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240039 4728 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240050 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240062 4728 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240073 4728 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240086 4728 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240099 
4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240111 4728 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240123 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240135 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240146 4728 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240159 4728 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240172 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240184 4728 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240195 4728 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240207 4728 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240218 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240229 4728 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240243 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240255 4728 reconciler_common.go:293] "Volume 
detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240267 4728 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240278 4728 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240290 4728 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240301 4728 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240312 4728 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240324 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240335 4728 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240347 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240359 4728 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240372 4728 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240386 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240399 4728 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240413 4728 
reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240425 4728 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240436 4728 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240447 4728 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240458 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240470 4728 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240481 4728 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240493 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240505 4728 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240516 4728 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240529 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240542 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240556 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc 
kubenswrapper[4728]: I1205 11:08:06.240571 4728 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240583 4728 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240595 4728 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240607 4728 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240619 4728 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240631 4728 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240642 4728 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240654 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240665 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240676 4728 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240687 4728 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240699 4728 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240711 4728 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" 
DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240722 4728 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240734 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240746 4728 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240757 4728 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240767 4728 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240778 4728 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240803 4728 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240816 4728 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240829 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240841 4728 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240853 4728 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240865 4728 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240876 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Dec 
05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240887 4728 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240899 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240912 4728 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240923 4728 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240934 4728 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240946 4728 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240957 4728 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240969 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240980 4728 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.240991 4728 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.241002 4728 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.241013 4728 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.241024 4728 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 
11:08:06.241036 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.241048 4728 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.241058 4728 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.241070 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.241081 4728 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.241091 4728 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.241102 4728 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.241113 4728 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.241126 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.241136 4728 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.241148 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.241160 4728 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.241171 4728 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.241181 4728 reconciler_common.go:293] 
"Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.241192 4728 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.241203 4728 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.241214 4728 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.241224 4728 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.241235 4728 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.241251 4728 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.241263 4728 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.241276 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.241289 4728 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.241302 4728 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.241314 4728 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.241327 4728 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.241337 4728 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: 
\"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.241349 4728 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.241344 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.241361 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.241620 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.241645 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.242027 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.242132 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.242459 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.242614 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.242892 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.243001 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.243399 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.243553 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.243769 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.244061 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.244111 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.244404 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.244456 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.244640 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.244845 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.244844 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.245029 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.245034 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.245144 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.245237 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.245331 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: E1205 11:08:06.245432 4728 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.245592 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: E1205 11:08:06.245637 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 11:08:06.745615141 +0000 UTC m=+20.887737834 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.246324 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.246425 4728 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. 
Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Dec 05 11:08:06 crc kubenswrapper[4728]: E1205 11:08:06.246532 4728 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 11:08:06 crc kubenswrapper[4728]: E1205 11:08:06.246626 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 11:08:06.746605903 +0000 UTC m=+20.888728666 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.246645 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.246708 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.246869 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.246995 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.247023 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.247158 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.247578 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.247720 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.247733 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.247888 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.248347 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.248357 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.247749 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.248375 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.248856 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.248614 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.248691 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.248842 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.248890 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.249598 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.249725 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.249785 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.251055 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.251087 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.252021 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.252115 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.252600 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.252870 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.252957 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.248026 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.258998 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.259072 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.259758 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.259845 4728 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-12-05 11:03:05 +0000 UTC, rotation deadline is 2026-10-09 08:38:57.938205268 +0000 UTC Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.259892 4728 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 7389h30m51.678316579s for next certificate rotation Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.278361 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.279607 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: E1205 11:08:06.279705 4728 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 11:08:06 crc kubenswrapper[4728]: E1205 11:08:06.279734 4728 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 11:08:06 crc kubenswrapper[4728]: E1205 11:08:06.279748 4728 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 11:08:06 crc kubenswrapper[4728]: E1205 11:08:06.279910 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 11:08:06.779884923 +0000 UTC m=+20.922007626 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.280131 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.280542 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.280661 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 11:08:06 crc kubenswrapper[4728]: E1205 11:08:06.281275 4728 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 11:08:06 crc kubenswrapper[4728]: E1205 11:08:06.281304 4728 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 11:08:06 crc kubenswrapper[4728]: E1205 11:08:06.281319 4728 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 11:08:06 crc kubenswrapper[4728]: E1205 11:08:06.281371 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 11:08:06.781353155 +0000 UTC m=+20.923475918 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.283597 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.284307 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.284699 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.286037 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.287199 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.305294 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.324256 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.334537 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.342509 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/f536e7a4-ad53-442e-b7c3-8928fcd89f22-hosts-file\") pod \"node-resolver-85f5z\" (UID: \"f536e7a4-ad53-442e-b7c3-8928fcd89f22\") " pod="openshift-dns/node-resolver-85f5z" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.342559 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r72s7\" (UniqueName: \"kubernetes.io/projected/f536e7a4-ad53-442e-b7c3-8928fcd89f22-kube-api-access-r72s7\") pod \"node-resolver-85f5z\" (UID: \"f536e7a4-ad53-442e-b7c3-8928fcd89f22\") " pod="openshift-dns/node-resolver-85f5z" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.342590 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.342627 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.342664 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.342676 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.342687 4728 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.342698 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.342708 4728 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.342719 4728 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.342728 4728 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: 
I1205 11:08:06.342738 4728 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.342748 4728 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.342759 4728 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.342769 4728 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.342779 4728 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.342812 4728 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.342825 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.342836 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.342846 4728 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.342856 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.342866 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.342876 4728 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.342885 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.342896 4728 
reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.342906 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.342917 4728 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.342927 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.342937 4728 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.342948 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.342958 4728 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.342968 4728 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.342979 4728 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.342989 4728 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.342999 4728 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.343010 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.343021 4728 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.343034 4728 
reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.343045 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.343056 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.343066 4728 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.343076 4728 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.343086 4728 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.343097 4728 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.343109 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.343120 4728 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.343179 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.343239 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/f536e7a4-ad53-442e-b7c3-8928fcd89f22-hosts-file\") pod \"node-resolver-85f5z\" (UID: \"f536e7a4-ad53-442e-b7c3-8928fcd89f22\") " pod="openshift-dns/node-resolver-85f5z" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.343524 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " 
pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.343551 4728 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.343564 4728 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.343577 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.343588 4728 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.343599 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.343609 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.343620 4728 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.343630 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.343642 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.343652 4728 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.343662 4728 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.343672 4728 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.343682 4728 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: 
\"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.343694 4728 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.343705 4728 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.343716 4728 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.343727 4728 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.343737 4728 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.343749 4728 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.343759 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.343770 4728 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.343782 4728 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.356845 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.357346 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.358696 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.359484 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.360166 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.360720 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.361908 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.362184 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r72s7\" (UniqueName: \"kubernetes.io/projected/f536e7a4-ad53-442e-b7c3-8928fcd89f22-kube-api-access-r72s7\") pod \"node-resolver-85f5z\" (UID: \"f536e7a4-ad53-442e-b7c3-8928fcd89f22\") " pod="openshift-dns/node-resolver-85f5z" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.362557 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.363294 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.363762 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.364423 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.365648 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.366401 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.367298 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.367789 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.368281 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.369278 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.369878 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.370596 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 
11:08:06.371151 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.371789 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.372910 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.373568 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.374142 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.375480 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.375766 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.376139 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.377214 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.377960 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.378770 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.379314 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.380228 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.380655 4728 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.380745 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.382806 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.383266 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.383707 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.385594 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.386594 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.387126 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.388378 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.389194 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.390225 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.390970 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.391170 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.392162 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.393482 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.394077 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.394629 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.395471 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.396582 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Dec 05 11:08:06 crc 
kubenswrapper[4728]: I1205 11:08:06.396974 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-85f5z" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.397173 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.397764 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.398757 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.399929 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.401227 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.401738 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.404057 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.410094 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.411965 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.419464 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.423495 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-85f5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f536e7a4-ad53-442e-b7c3-8928fcd89f22\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r72s7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod 
\"openshift-dns\"/\"node-resolver-85f5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.448836 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.475755 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.476401 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.478934 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"dd145f7ac78fc008f9b9b24a11f51d6afc11e3cfa047b8d178d11507ddde8e77"} Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.480027 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.481170 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"c109fab0b9a2381f51e510a344bbfaa77437b39c1ddf540c84ead113eefde6ef"} Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.483249 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"a9c4d0ea3091e66d590de0a3dee0600355bf6fe685438696249c0d5747872221"} Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.490226 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"94472a952861c0737375089fce6a81e871a5dd45d6838018a33b7792e8bd74f7"} Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.498289 4728 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-85f5z" event={"ID":"f536e7a4-ad53-442e-b7c3-8928fcd89f22","Type":"ContainerStarted","Data":"d1d685b9b18ab219f407e611dd55532469355a657d02cf4c2ef62754c202445b"} Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.503049 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.534140 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.580088 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.601965 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.657568 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.663875 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-85f5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f536e7a4-ad53-442e-b7c3-8928fcd89f22\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r72s7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-85f5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" 
Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.678650 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.694743 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.698600 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-gf8np"] Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.698994 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-gf8np" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.699049 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-w8qlp"] Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.699369 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-8pwbb"] Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.699596 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.700048 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-8pwbb" Dec 05 11:08:06 crc kubenswrapper[4728]: W1205 11:08:06.703258 4728 reflector.go:561] object-"openshift-machine-config-operator"/"proxy-tls": failed to list *v1.Secret: secrets "proxy-tls" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-machine-config-operator": no relationship found between node 'crc' and this object Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.703309 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.703342 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 05 11:08:06 crc kubenswrapper[4728]: W1205 11:08:06.703399 4728 reflector.go:561] object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq": failed to list *v1.Secret: secrets "machine-config-daemon-dockercfg-r5tcq" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-machine-config-operator": no relationship found between node 'crc' and this object Dec 05 11:08:06 crc kubenswrapper[4728]: E1205 11:08:06.703414 4728 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-config-operator\"/\"machine-config-daemon-dockercfg-r5tcq\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"machine-config-daemon-dockercfg-r5tcq\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-machine-config-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 11:08:06 crc kubenswrapper[4728]: W1205 11:08:06.703466 4728 reflector.go:561] object-"openshift-multus"/"default-cni-sysctl-allowlist": failed to list *v1.ConfigMap: configmaps "default-cni-sysctl-allowlist" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-multus": no relationship found between node 'crc' and this object Dec 05 11:08:06 crc kubenswrapper[4728]: E1205 11:08:06.703478 4728 reflector.go:158] "Unhandled Error" err="object-\"openshift-multus\"/\"default-cni-sysctl-allowlist\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"default-cni-sysctl-allowlist\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-multus\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.703509 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 05 11:08:06 crc kubenswrapper[4728]: W1205 11:08:06.703541 4728 reflector.go:561] object-"openshift-machine-config-operator"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-machine-config-operator": no relationship found between node 'crc' and this object Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.703357 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 05 11:08:06 crc kubenswrapper[4728]: W1205 11:08:06.703582 4728 reflector.go:561] 
object-"openshift-machine-config-operator"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-machine-config-operator": no relationship found between node 'crc' and this object Dec 05 11:08:06 crc kubenswrapper[4728]: W1205 11:08:06.703585 4728 reflector.go:561] object-"openshift-machine-config-operator"/"kube-rbac-proxy": failed to list *v1.ConfigMap: configmaps "kube-rbac-proxy" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-machine-config-operator": no relationship found between node 'crc' and this object Dec 05 11:08:06 crc kubenswrapper[4728]: E1205 11:08:06.703595 4728 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-config-operator\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-machine-config-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 11:08:06 crc kubenswrapper[4728]: E1205 11:08:06.703553 4728 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-config-operator\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-machine-config-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.703268 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 05 11:08:06 crc kubenswrapper[4728]: E1205 11:08:06.703618 4728 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-config-operator\"/\"kube-rbac-proxy\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-rbac-proxy\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-machine-config-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 11:08:06 crc kubenswrapper[4728]: E1205 11:08:06.703302 4728 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-config-operator\"/\"proxy-tls\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"proxy-tls\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-machine-config-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 11:08:06 crc kubenswrapper[4728]: W1205 11:08:06.703647 4728 reflector.go:561] object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz": failed to list *v1.Secret: secrets "multus-ancillary-tools-dockercfg-vnmsz" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-multus": no relationship found between node 'crc' and this object Dec 05 11:08:06 crc kubenswrapper[4728]: E1205 11:08:06.703668 4728 reflector.go:158] "Unhandled Error" err="object-\"openshift-multus\"/\"multus-ancillary-tools-dockercfg-vnmsz\": Failed to watch *v1.Secret: failed to list *v1.Secret: 
secrets \"multus-ancillary-tools-dockercfg-vnmsz\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-multus\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.713067 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-85f5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f536e7a4-ad53-442e-b7c3-8928fcd89f22\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r72s7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-85f5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.728535 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready 
status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.745238 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.752586 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.752665 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.752693 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:08:06 crc kubenswrapper[4728]: E1205 11:08:06.752754 4728 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 11:08:06 crc kubenswrapper[4728]: E1205 11:08:06.752812 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 11:08:07.752784191 +0000 UTC m=+21.894906884 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 11:08:06 crc kubenswrapper[4728]: E1205 11:08:06.752862 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-05 11:08:07.752855483 +0000 UTC m=+21.894978176 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:08:06 crc kubenswrapper[4728]: E1205 11:08:06.752916 4728 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 11:08:06 crc kubenswrapper[4728]: E1205 11:08:06.752937 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 11:08:07.752931804 +0000 UTC m=+21.895054497 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.756947 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6b1dbb0-8a99-4b3b-870e-771cdaac1bac\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://adb81cae56872a55c5781abe457653330ce2284251d64366ece5f2f05055b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ab41915fb3de0296bc2ba49fe39835620c1ed216790add23ccc2c23ac718c2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://920da764fdc49312fdf906f31a1280028d5762b4b7af2b3806c9488e3dfa9d71\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd145f7ac78fc008f9b9b24a11f51d6afc11e3cfa047b8d178d11507ddde8e77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b05c55e24f53fc91a2fd2b2a34b000751799933c3cb56da1ea666a6bfb6ba13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T11:08:05Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 11:07:59.827917 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 11:07:59.829563 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2883091425/tls.crt::/tmp/serving-cert-2883091425/tls.key\\\\\\\"\\\\nI1205 11:08:05.319025 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 11:08:05.321746 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 11:08:05.321774 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 11:08:05.321847 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 11:08:05.321860 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 11:08:05.327281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 11:08:05.327310 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 11:08:05.327326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 11:08:05.327329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 11:08:05.327332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 11:08:05.327493 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 11:08:05.328771 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af8363ba45c19a07e7d11064c2e321ec28b939288d01eb82d8bd9e1ee6b93998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.770766 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.788320 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.801443 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.817067 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.828909 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gf8np" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f292da29-a632-47aa-8bcc-2d999eaa6c11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dxmch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gf8np\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.842486 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gf8np" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f292da29-a632-47aa-8bcc-2d999eaa6c11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dxmch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gf8np\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.853319 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bfa60b-fcb6-4519-abc5-c25fea50921d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w8qlp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 
11:08:06.853681 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/f292da29-a632-47aa-8bcc-2d999eaa6c11-host-run-k8s-cni-cncf-io\") pod \"multus-gf8np\" (UID: \"f292da29-a632-47aa-8bcc-2d999eaa6c11\") " pod="openshift-multus/multus-gf8np" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.853706 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/95bfa60b-fcb6-4519-abc5-c25fea50921d-mcd-auth-proxy-config\") pod \"machine-config-daemon-w8qlp\" (UID: \"95bfa60b-fcb6-4519-abc5-c25fea50921d\") " pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.853738 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.853789 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/f292da29-a632-47aa-8bcc-2d999eaa6c11-hostroot\") pod \"multus-gf8np\" (UID: \"f292da29-a632-47aa-8bcc-2d999eaa6c11\") " pod="openshift-multus/multus-gf8np" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.853821 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/f292da29-a632-47aa-8bcc-2d999eaa6c11-multus-socket-dir-parent\") pod \"multus-gf8np\" (UID: \"f292da29-a632-47aa-8bcc-2d999eaa6c11\") " pod="openshift-multus/multus-gf8np" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.853835 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/f292da29-a632-47aa-8bcc-2d999eaa6c11-host-run-multus-certs\") pod \"multus-gf8np\" (UID: \"f292da29-a632-47aa-8bcc-2d999eaa6c11\") " pod="openshift-multus/multus-gf8np" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.853850 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/e18c7d32-4ecb-4931-931e-56a7898cb233-tuning-conf-dir\") pod \"multus-additional-cni-plugins-8pwbb\" (UID: \"e18c7d32-4ecb-4931-931e-56a7898cb233\") " pod="openshift-multus/multus-additional-cni-plugins-8pwbb" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.853865 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/f292da29-a632-47aa-8bcc-2d999eaa6c11-os-release\") pod \"multus-gf8np\" (UID: \"f292da29-a632-47aa-8bcc-2d999eaa6c11\") " pod="openshift-multus/multus-gf8np" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.853878 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/f292da29-a632-47aa-8bcc-2d999eaa6c11-host-var-lib-cni-multus\") pod \"multus-gf8np\" (UID: 
\"f292da29-a632-47aa-8bcc-2d999eaa6c11\") " pod="openshift-multus/multus-gf8np" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.853891 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bq4mx\" (UniqueName: \"kubernetes.io/projected/e18c7d32-4ecb-4931-931e-56a7898cb233-kube-api-access-bq4mx\") pod \"multus-additional-cni-plugins-8pwbb\" (UID: \"e18c7d32-4ecb-4931-931e-56a7898cb233\") " pod="openshift-multus/multus-additional-cni-plugins-8pwbb" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.853906 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f292da29-a632-47aa-8bcc-2d999eaa6c11-multus-cni-dir\") pod \"multus-gf8np\" (UID: \"f292da29-a632-47aa-8bcc-2d999eaa6c11\") " pod="openshift-multus/multus-gf8np" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.853920 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/f292da29-a632-47aa-8bcc-2d999eaa6c11-cni-binary-copy\") pod \"multus-gf8np\" (UID: \"f292da29-a632-47aa-8bcc-2d999eaa6c11\") " pod="openshift-multus/multus-gf8np" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.853933 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f292da29-a632-47aa-8bcc-2d999eaa6c11-host-run-netns\") pod \"multus-gf8np\" (UID: \"f292da29-a632-47aa-8bcc-2d999eaa6c11\") " pod="openshift-multus/multus-gf8np" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.853949 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/e18c7d32-4ecb-4931-931e-56a7898cb233-os-release\") pod \"multus-additional-cni-plugins-8pwbb\" (UID: \"e18c7d32-4ecb-4931-931e-56a7898cb233\") " pod="openshift-multus/multus-additional-cni-plugins-8pwbb" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.853967 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.853984 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e18c7d32-4ecb-4931-931e-56a7898cb233-system-cni-dir\") pod \"multus-additional-cni-plugins-8pwbb\" (UID: \"e18c7d32-4ecb-4931-931e-56a7898cb233\") " pod="openshift-multus/multus-additional-cni-plugins-8pwbb" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.854003 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/e18c7d32-4ecb-4931-931e-56a7898cb233-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-8pwbb\" (UID: \"e18c7d32-4ecb-4931-931e-56a7898cb233\") " pod="openshift-multus/multus-additional-cni-plugins-8pwbb" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.854025 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f292da29-a632-47aa-8bcc-2d999eaa6c11-system-cni-dir\") pod \"multus-gf8np\" (UID: \"f292da29-a632-47aa-8bcc-2d999eaa6c11\") " pod="openshift-multus/multus-gf8np" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.854042 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/e18c7d32-4ecb-4931-931e-56a7898cb233-cnibin\") pod \"multus-additional-cni-plugins-8pwbb\" (UID: \"e18c7d32-4ecb-4931-931e-56a7898cb233\") " pod="openshift-multus/multus-additional-cni-plugins-8pwbb" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.854071 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/95bfa60b-fcb6-4519-abc5-c25fea50921d-rootfs\") pod \"machine-config-daemon-w8qlp\" (UID: \"95bfa60b-fcb6-4519-abc5-c25fea50921d\") " pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.854085 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/95bfa60b-fcb6-4519-abc5-c25fea50921d-proxy-tls\") pod \"machine-config-daemon-w8qlp\" (UID: \"95bfa60b-fcb6-4519-abc5-c25fea50921d\") " pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.854098 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f292da29-a632-47aa-8bcc-2d999eaa6c11-etc-kubernetes\") pod \"multus-gf8np\" (UID: \"f292da29-a632-47aa-8bcc-2d999eaa6c11\") " pod="openshift-multus/multus-gf8np" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.854111 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/f292da29-a632-47aa-8bcc-2d999eaa6c11-cnibin\") pod \"multus-gf8np\" (UID: \"f292da29-a632-47aa-8bcc-2d999eaa6c11\") " pod="openshift-multus/multus-gf8np" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.854126 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f292da29-a632-47aa-8bcc-2d999eaa6c11-host-var-lib-cni-bin\") pod \"multus-gf8np\" (UID: \"f292da29-a632-47aa-8bcc-2d999eaa6c11\") " pod="openshift-multus/multus-gf8np" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.854147 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/f292da29-a632-47aa-8bcc-2d999eaa6c11-host-var-lib-kubelet\") pod \"multus-gf8np\" (UID: \"f292da29-a632-47aa-8bcc-2d999eaa6c11\") " pod="openshift-multus/multus-gf8np" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.854160 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dxmch\" (UniqueName: \"kubernetes.io/projected/f292da29-a632-47aa-8bcc-2d999eaa6c11-kube-api-access-dxmch\") pod \"multus-gf8np\" (UID: \"f292da29-a632-47aa-8bcc-2d999eaa6c11\") " pod="openshift-multus/multus-gf8np" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.854175 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/f292da29-a632-47aa-8bcc-2d999eaa6c11-multus-conf-dir\") pod \"multus-gf8np\" (UID: \"f292da29-a632-47aa-8bcc-2d999eaa6c11\") " pod="openshift-multus/multus-gf8np" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.854191 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6r7pf\" (UniqueName: \"kubernetes.io/projected/95bfa60b-fcb6-4519-abc5-c25fea50921d-kube-api-access-6r7pf\") pod \"machine-config-daemon-w8qlp\" (UID: \"95bfa60b-fcb6-4519-abc5-c25fea50921d\") " pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.854212 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/e18c7d32-4ecb-4931-931e-56a7898cb233-cni-binary-copy\") pod \"multus-additional-cni-plugins-8pwbb\" (UID: \"e18c7d32-4ecb-4931-931e-56a7898cb233\") " pod="openshift-multus/multus-additional-cni-plugins-8pwbb" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.854226 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/f292da29-a632-47aa-8bcc-2d999eaa6c11-multus-daemon-config\") pod \"multus-gf8np\" (UID: \"f292da29-a632-47aa-8bcc-2d999eaa6c11\") " pod="openshift-multus/multus-gf8np" Dec 05 11:08:06 crc kubenswrapper[4728]: E1205 11:08:06.854341 4728 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 11:08:06 crc kubenswrapper[4728]: E1205 11:08:06.854355 4728 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 11:08:06 crc kubenswrapper[4728]: E1205 11:08:06.854366 4728 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 11:08:06 crc kubenswrapper[4728]: E1205 11:08:06.854398 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 11:08:07.854386054 +0000 UTC m=+21.996508747 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 11:08:06 crc kubenswrapper[4728]: E1205 11:08:06.854494 4728 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 11:08:06 crc kubenswrapper[4728]: E1205 11:08:06.854506 4728 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 11:08:06 crc kubenswrapper[4728]: E1205 11:08:06.854514 4728 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 11:08:06 crc kubenswrapper[4728]: E1205 11:08:06.854538 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 11:08:07.854529497 +0000 UTC m=+21.996652190 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.864627 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8pwbb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e18c7d32-4ecb-4931-931e-56a7898cb233\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8pwbb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.878317 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6b1dbb0-8a99-4b3b-870e-771cdaac1bac\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://adb81cae56872a55c5781abe457653330ce2284251d64366ece5f2f05055b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ab41915fb3de0296bc2ba49fe39835620c1ed216790add23ccc2c23ac718c2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://920da764fdc49312fdf906f31a1280028d5762b4b7af2b3806c9488e3dfa9d71\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd145f7ac78fc008f9b9b24a11f51d6afc11e3cfa047b8d178d11507ddde8e77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b05c55e24f53fc91a2fd2b2a34b000751799933c3cb56da1ea666a6bfb6ba13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T11:08:05Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 11:07:59.827917 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 11:07:59.829563 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2883091425/tls.crt::/tmp/serving-cert-2883091425/tls.key\\\\\\\"\\\\nI1205 11:08:05.319025 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 11:08:05.321746 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 11:08:05.321774 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 11:08:05.321847 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 11:08:05.321860 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 11:08:05.327281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 11:08:05.327310 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 11:08:05.327326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 11:08:05.327329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 11:08:05.327332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 11:08:05.327493 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 11:08:05.328771 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af8363ba45c19a07e7d11064c2e321ec28b939288d01eb82d8bd9e1ee6b93998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.887277 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.897489 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.911261 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.919757 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-85f5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f536e7a4-ad53-442e-b7c3-8928fcd89f22\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r72s7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-85f5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: 
connection refused" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.931243 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.942495 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.950862 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.955302 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f292da29-a632-47aa-8bcc-2d999eaa6c11-system-cni-dir\") pod \"multus-gf8np\" (UID: \"f292da29-a632-47aa-8bcc-2d999eaa6c11\") " pod="openshift-multus/multus-gf8np" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.955330 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e18c7d32-4ecb-4931-931e-56a7898cb233-system-cni-dir\") pod \"multus-additional-cni-plugins-8pwbb\" (UID: \"e18c7d32-4ecb-4931-931e-56a7898cb233\") " pod="openshift-multus/multus-additional-cni-plugins-8pwbb" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.955347 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/e18c7d32-4ecb-4931-931e-56a7898cb233-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-8pwbb\" (UID: \"e18c7d32-4ecb-4931-931e-56a7898cb233\") " pod="openshift-multus/multus-additional-cni-plugins-8pwbb" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.955363 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/95bfa60b-fcb6-4519-abc5-c25fea50921d-rootfs\") pod \"machine-config-daemon-w8qlp\" (UID: \"95bfa60b-fcb6-4519-abc5-c25fea50921d\") " pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.955377 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/95bfa60b-fcb6-4519-abc5-c25fea50921d-proxy-tls\") pod \"machine-config-daemon-w8qlp\" (UID: \"95bfa60b-fcb6-4519-abc5-c25fea50921d\") " pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.955393 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: 
\"kubernetes.io/host-path/e18c7d32-4ecb-4931-931e-56a7898cb233-cnibin\") pod \"multus-additional-cni-plugins-8pwbb\" (UID: \"e18c7d32-4ecb-4931-931e-56a7898cb233\") " pod="openshift-multus/multus-additional-cni-plugins-8pwbb" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.955406 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/f292da29-a632-47aa-8bcc-2d999eaa6c11-cnibin\") pod \"multus-gf8np\" (UID: \"f292da29-a632-47aa-8bcc-2d999eaa6c11\") " pod="openshift-multus/multus-gf8np" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.955421 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f292da29-a632-47aa-8bcc-2d999eaa6c11-host-var-lib-cni-bin\") pod \"multus-gf8np\" (UID: \"f292da29-a632-47aa-8bcc-2d999eaa6c11\") " pod="openshift-multus/multus-gf8np" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.955435 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f292da29-a632-47aa-8bcc-2d999eaa6c11-etc-kubernetes\") pod \"multus-gf8np\" (UID: \"f292da29-a632-47aa-8bcc-2d999eaa6c11\") " pod="openshift-multus/multus-gf8np" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.955452 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/f292da29-a632-47aa-8bcc-2d999eaa6c11-host-var-lib-kubelet\") pod \"multus-gf8np\" (UID: \"f292da29-a632-47aa-8bcc-2d999eaa6c11\") " pod="openshift-multus/multus-gf8np" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.955481 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dxmch\" (UniqueName: \"kubernetes.io/projected/f292da29-a632-47aa-8bcc-2d999eaa6c11-kube-api-access-dxmch\") pod \"multus-gf8np\" (UID: \"f292da29-a632-47aa-8bcc-2d999eaa6c11\") " pod="openshift-multus/multus-gf8np" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.955486 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/e18c7d32-4ecb-4931-931e-56a7898cb233-cnibin\") pod \"multus-additional-cni-plugins-8pwbb\" (UID: \"e18c7d32-4ecb-4931-931e-56a7898cb233\") " pod="openshift-multus/multus-additional-cni-plugins-8pwbb" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.955497 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e18c7d32-4ecb-4931-931e-56a7898cb233-system-cni-dir\") pod \"multus-additional-cni-plugins-8pwbb\" (UID: \"e18c7d32-4ecb-4931-931e-56a7898cb233\") " pod="openshift-multus/multus-additional-cni-plugins-8pwbb" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.955500 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/f292da29-a632-47aa-8bcc-2d999eaa6c11-multus-conf-dir\") pod \"multus-gf8np\" (UID: \"f292da29-a632-47aa-8bcc-2d999eaa6c11\") " pod="openshift-multus/multus-gf8np" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.955529 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f292da29-a632-47aa-8bcc-2d999eaa6c11-host-var-lib-cni-bin\") pod \"multus-gf8np\" (UID: \"f292da29-a632-47aa-8bcc-2d999eaa6c11\") " 
pod="openshift-multus/multus-gf8np" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.955538 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/f292da29-a632-47aa-8bcc-2d999eaa6c11-multus-conf-dir\") pod \"multus-gf8np\" (UID: \"f292da29-a632-47aa-8bcc-2d999eaa6c11\") " pod="openshift-multus/multus-gf8np" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.955553 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f292da29-a632-47aa-8bcc-2d999eaa6c11-system-cni-dir\") pod \"multus-gf8np\" (UID: \"f292da29-a632-47aa-8bcc-2d999eaa6c11\") " pod="openshift-multus/multus-gf8np" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.955573 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f292da29-a632-47aa-8bcc-2d999eaa6c11-etc-kubernetes\") pod \"multus-gf8np\" (UID: \"f292da29-a632-47aa-8bcc-2d999eaa6c11\") " pod="openshift-multus/multus-gf8np" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.955621 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/95bfa60b-fcb6-4519-abc5-c25fea50921d-rootfs\") pod \"machine-config-daemon-w8qlp\" (UID: \"95bfa60b-fcb6-4519-abc5-c25fea50921d\") " pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.955630 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6r7pf\" (UniqueName: \"kubernetes.io/projected/95bfa60b-fcb6-4519-abc5-c25fea50921d-kube-api-access-6r7pf\") pod \"machine-config-daemon-w8qlp\" (UID: \"95bfa60b-fcb6-4519-abc5-c25fea50921d\") " pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.955705 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/f292da29-a632-47aa-8bcc-2d999eaa6c11-cnibin\") pod \"multus-gf8np\" (UID: \"f292da29-a632-47aa-8bcc-2d999eaa6c11\") " pod="openshift-multus/multus-gf8np" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.955763 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/e18c7d32-4ecb-4931-931e-56a7898cb233-cni-binary-copy\") pod \"multus-additional-cni-plugins-8pwbb\" (UID: \"e18c7d32-4ecb-4931-931e-56a7898cb233\") " pod="openshift-multus/multus-additional-cni-plugins-8pwbb" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.955706 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/f292da29-a632-47aa-8bcc-2d999eaa6c11-host-var-lib-kubelet\") pod \"multus-gf8np\" (UID: \"f292da29-a632-47aa-8bcc-2d999eaa6c11\") " pod="openshift-multus/multus-gf8np" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.955829 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/f292da29-a632-47aa-8bcc-2d999eaa6c11-multus-daemon-config\") pod \"multus-gf8np\" (UID: \"f292da29-a632-47aa-8bcc-2d999eaa6c11\") " pod="openshift-multus/multus-gf8np" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.955852 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/f292da29-a632-47aa-8bcc-2d999eaa6c11-host-run-k8s-cni-cncf-io\") pod \"multus-gf8np\" (UID: \"f292da29-a632-47aa-8bcc-2d999eaa6c11\") " pod="openshift-multus/multus-gf8np" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.955883 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/95bfa60b-fcb6-4519-abc5-c25fea50921d-mcd-auth-proxy-config\") pod \"machine-config-daemon-w8qlp\" (UID: \"95bfa60b-fcb6-4519-abc5-c25fea50921d\") " pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.955912 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/f292da29-a632-47aa-8bcc-2d999eaa6c11-hostroot\") pod \"multus-gf8np\" (UID: \"f292da29-a632-47aa-8bcc-2d999eaa6c11\") " pod="openshift-multus/multus-gf8np" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.955930 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/f292da29-a632-47aa-8bcc-2d999eaa6c11-host-run-k8s-cni-cncf-io\") pod \"multus-gf8np\" (UID: \"f292da29-a632-47aa-8bcc-2d999eaa6c11\") " pod="openshift-multus/multus-gf8np" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.955936 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/f292da29-a632-47aa-8bcc-2d999eaa6c11-multus-socket-dir-parent\") pod \"multus-gf8np\" (UID: \"f292da29-a632-47aa-8bcc-2d999eaa6c11\") " pod="openshift-multus/multus-gf8np" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.955997 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/f292da29-a632-47aa-8bcc-2d999eaa6c11-multus-socket-dir-parent\") pod \"multus-gf8np\" (UID: \"f292da29-a632-47aa-8bcc-2d999eaa6c11\") " pod="openshift-multus/multus-gf8np" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.956002 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/f292da29-a632-47aa-8bcc-2d999eaa6c11-host-run-multus-certs\") pod \"multus-gf8np\" (UID: \"f292da29-a632-47aa-8bcc-2d999eaa6c11\") " pod="openshift-multus/multus-gf8np" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.956015 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/f292da29-a632-47aa-8bcc-2d999eaa6c11-hostroot\") pod \"multus-gf8np\" (UID: \"f292da29-a632-47aa-8bcc-2d999eaa6c11\") " pod="openshift-multus/multus-gf8np" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.956019 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/f292da29-a632-47aa-8bcc-2d999eaa6c11-host-run-multus-certs\") pod \"multus-gf8np\" (UID: \"f292da29-a632-47aa-8bcc-2d999eaa6c11\") " pod="openshift-multus/multus-gf8np" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.956040 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/e18c7d32-4ecb-4931-931e-56a7898cb233-tuning-conf-dir\") pod \"multus-additional-cni-plugins-8pwbb\" 
(UID: \"e18c7d32-4ecb-4931-931e-56a7898cb233\") " pod="openshift-multus/multus-additional-cni-plugins-8pwbb" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.956064 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f292da29-a632-47aa-8bcc-2d999eaa6c11-multus-cni-dir\") pod \"multus-gf8np\" (UID: \"f292da29-a632-47aa-8bcc-2d999eaa6c11\") " pod="openshift-multus/multus-gf8np" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.956084 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/f292da29-a632-47aa-8bcc-2d999eaa6c11-os-release\") pod \"multus-gf8np\" (UID: \"f292da29-a632-47aa-8bcc-2d999eaa6c11\") " pod="openshift-multus/multus-gf8np" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.956122 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/f292da29-a632-47aa-8bcc-2d999eaa6c11-host-var-lib-cni-multus\") pod \"multus-gf8np\" (UID: \"f292da29-a632-47aa-8bcc-2d999eaa6c11\") " pod="openshift-multus/multus-gf8np" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.956147 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bq4mx\" (UniqueName: \"kubernetes.io/projected/e18c7d32-4ecb-4931-931e-56a7898cb233-kube-api-access-bq4mx\") pod \"multus-additional-cni-plugins-8pwbb\" (UID: \"e18c7d32-4ecb-4931-931e-56a7898cb233\") " pod="openshift-multus/multus-additional-cni-plugins-8pwbb" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.956170 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/f292da29-a632-47aa-8bcc-2d999eaa6c11-cni-binary-copy\") pod \"multus-gf8np\" (UID: \"f292da29-a632-47aa-8bcc-2d999eaa6c11\") " pod="openshift-multus/multus-gf8np" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.956191 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f292da29-a632-47aa-8bcc-2d999eaa6c11-host-run-netns\") pod \"multus-gf8np\" (UID: \"f292da29-a632-47aa-8bcc-2d999eaa6c11\") " pod="openshift-multus/multus-gf8np" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.956170 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f292da29-a632-47aa-8bcc-2d999eaa6c11-multus-cni-dir\") pod \"multus-gf8np\" (UID: \"f292da29-a632-47aa-8bcc-2d999eaa6c11\") " pod="openshift-multus/multus-gf8np" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.956221 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/e18c7d32-4ecb-4931-931e-56a7898cb233-os-release\") pod \"multus-additional-cni-plugins-8pwbb\" (UID: \"e18c7d32-4ecb-4931-931e-56a7898cb233\") " pod="openshift-multus/multus-additional-cni-plugins-8pwbb" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.956173 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/f292da29-a632-47aa-8bcc-2d999eaa6c11-host-var-lib-cni-multus\") pod \"multus-gf8np\" (UID: \"f292da29-a632-47aa-8bcc-2d999eaa6c11\") " pod="openshift-multus/multus-gf8np" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 
11:08:06.956247 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f292da29-a632-47aa-8bcc-2d999eaa6c11-host-run-netns\") pod \"multus-gf8np\" (UID: \"f292da29-a632-47aa-8bcc-2d999eaa6c11\") " pod="openshift-multus/multus-gf8np" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.956381 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/e18c7d32-4ecb-4931-931e-56a7898cb233-os-release\") pod \"multus-additional-cni-plugins-8pwbb\" (UID: \"e18c7d32-4ecb-4931-931e-56a7898cb233\") " pod="openshift-multus/multus-additional-cni-plugins-8pwbb" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.956382 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/f292da29-a632-47aa-8bcc-2d999eaa6c11-os-release\") pod \"multus-gf8np\" (UID: \"f292da29-a632-47aa-8bcc-2d999eaa6c11\") " pod="openshift-multus/multus-gf8np" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.956535 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/f292da29-a632-47aa-8bcc-2d999eaa6c11-multus-daemon-config\") pod \"multus-gf8np\" (UID: \"f292da29-a632-47aa-8bcc-2d999eaa6c11\") " pod="openshift-multus/multus-gf8np" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.956579 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/e18c7d32-4ecb-4931-931e-56a7898cb233-cni-binary-copy\") pod \"multus-additional-cni-plugins-8pwbb\" (UID: \"e18c7d32-4ecb-4931-931e-56a7898cb233\") " pod="openshift-multus/multus-additional-cni-plugins-8pwbb" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.956756 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/f292da29-a632-47aa-8bcc-2d999eaa6c11-cni-binary-copy\") pod \"multus-gf8np\" (UID: \"f292da29-a632-47aa-8bcc-2d999eaa6c11\") " pod="openshift-multus/multus-gf8np" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.956876 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/e18c7d32-4ecb-4931-931e-56a7898cb233-tuning-conf-dir\") pod \"multus-additional-cni-plugins-8pwbb\" (UID: \"e18c7d32-4ecb-4931-931e-56a7898cb233\") " pod="openshift-multus/multus-additional-cni-plugins-8pwbb" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.979241 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bq4mx\" (UniqueName: \"kubernetes.io/projected/e18c7d32-4ecb-4931-931e-56a7898cb233-kube-api-access-bq4mx\") pod \"multus-additional-cni-plugins-8pwbb\" (UID: \"e18c7d32-4ecb-4931-931e-56a7898cb233\") " pod="openshift-multus/multus-additional-cni-plugins-8pwbb" Dec 05 11:08:06 crc kubenswrapper[4728]: I1205 11:08:06.979241 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dxmch\" (UniqueName: \"kubernetes.io/projected/f292da29-a632-47aa-8bcc-2d999eaa6c11-kube-api-access-dxmch\") pod \"multus-gf8np\" (UID: \"f292da29-a632-47aa-8bcc-2d999eaa6c11\") " pod="openshift-multus/multus-gf8np" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.030254 4728 util.go:30] "No sandbox for pod can be found. 
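[editor's note] The mount sequence above follows the kubelet's usual two-step pattern: reconciler_common logs "operationExecutor.MountVolume started", then operation_generator logs "MountVolume.SetUp succeeded" once the hostPath, configmap, or projected volume is in place. As a rough sketch of the pod-spec shape behind a record like the cnibin one (illustrative only: the host-side path /opt/cni/bin is an assumption, since the log carries just the volume name and the container mountPath; assumes k8s.io/api is on the module path):

// volumesketch.go - hypothetical illustration of the volume/volumeMount pair
// that yields a "kubernetes.io/host-path/<pod-uid>-cnibin" record.
package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
)

func main() {
	// Pod-level volume definition; Path is an assumed value, not from the log.
	vol := corev1.Volume{
		Name: "cnibin",
		VolumeSource: corev1.VolumeSource{
			HostPath: &corev1.HostPathVolumeSource{Path: "/opt/cni/bin"},
		},
	}
	// Container-level mount; this mountPath does appear in the status JSON above.
	mnt := corev1.VolumeMount{Name: "cnibin", MountPath: "/host/opt/cni/bin"}
	fmt.Printf("volume=%s hostPath=%s -> mountPath=%s\n",
		vol.Name, vol.VolumeSource.HostPath.Path, mnt.MountPath)
}

Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.030254 4728 util.go:30] "No sandbox for pod can be found. 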
Need to start a new one" pod="openshift-multus/multus-gf8np" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.106809 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-wchlf"] Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.107820 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.111195 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.111424 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.111565 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.111697 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.111841 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.112007 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.112395 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.136056 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.151699 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:07Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.163924 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:07Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.164694 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.164807 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.182701 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:07Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.195477 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:07Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.217210 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gf8np" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f292da29-a632-47aa-8bcc-2d999eaa6c11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dxmch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gf8np\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:07Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.228653 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bfa60b-fcb6-4519-abc5-c25fea50921d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w8qlp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:07Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.243350 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8pwbb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e18c7d32-4ecb-4931-931e-56a7898cb233\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8pwbb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:07Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.261046 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-var-lib-openvswitch\") pod \"ovnkube-node-wchlf\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.261093 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nv6h7\" (UniqueName: \"kubernetes.io/projected/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-kube-api-access-nv6h7\") pod \"ovnkube-node-wchlf\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.261126 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-run-ovn\") pod \"ovnkube-node-wchlf\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.261150 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-systemd-units\") pod \"ovnkube-node-wchlf\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.261187 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-host-slash\") pod \"ovnkube-node-wchlf\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.261207 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-ovnkube-config\") pod \"ovnkube-node-wchlf\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.261226 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-ovnkube-script-lib\") pod 
\"ovnkube-node-wchlf\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.261275 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-etc-openvswitch\") pod \"ovnkube-node-wchlf\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.261298 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-log-socket\") pod \"ovnkube-node-wchlf\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.261319 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-wchlf\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.261340 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-run-openvswitch\") pod \"ovnkube-node-wchlf\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.261358 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-node-log\") pod \"ovnkube-node-wchlf\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.261375 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-ovn-node-metrics-cert\") pod \"ovnkube-node-wchlf\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.261396 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-host-kubelet\") pod \"ovnkube-node-wchlf\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.261414 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-host-run-ovn-kubernetes\") pod \"ovnkube-node-wchlf\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.261440 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-env-overrides\") pod \"ovnkube-node-wchlf\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.261483 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-host-run-netns\") pod \"ovnkube-node-wchlf\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.261523 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-host-cni-bin\") pod \"ovnkube-node-wchlf\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.261554 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-host-cni-netd\") pod \"ovnkube-node-wchlf\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.261598 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-run-systemd\") pod \"ovnkube-node-wchlf\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.262741 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6b1dbb0-8a99-4b3b-870e-771cdaac1bac\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://adb81cae56872a55c5781abe457653330ce2284251d64366ece5f2f05055b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ab41915fb3de0296bc2ba49fe39835620c1ed216790add23ccc2c23ac718c2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://920da764fdc49312fdf906f31a1280028d5762b4b7af2b3806c9488e3dfa9d71\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd145f7ac78fc008f9b9b24a11f51d6afc11e3cfa047b8d178d11507ddde8e77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b05c55e24f53fc91a2fd2b2a34b000751799933c3cb56da1ea666a6bfb6ba13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T11:08:05Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 11:07:59.827917 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 11:07:59.829563 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2883091425/tls.crt::/tmp/serving-cert-2883091425/tls.key\\\\\\\"\\\\nI1205 11:08:05.319025 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 11:08:05.321746 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 11:08:05.321774 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 11:08:05.321847 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 11:08:05.321860 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 11:08:05.327281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 11:08:05.327310 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 11:08:05.327326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 11:08:05.327329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 11:08:05.327332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 11:08:05.327493 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 11:08:05.328771 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af8363ba45c19a07e7d11064c2e321ec28b939288d01eb82d8bd9e1ee6b93998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:07Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.273632 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-85f5z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f536e7a4-ad53-442e-b7c3-8928fcd89f22\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r72s7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-85f5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:07Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.286735 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:07Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.299343 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:07Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.315946 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wchlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:07Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.337084 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"600a7698-d6af-4d3a-997c-2af7492b676c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4e303bada68d0a42afcecbe481dd6199dd2e545598d65614cee8ce1097fa3da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e0e4eacda19e748d0d3f8e1d76b38c186c2df0f81a171387de0d9016a76c719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c6351e5a4a9441b43ba3145aa818599799ad7fcdd6bf62b5d0ec08eb3a9d5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c60e02416d658dfb8deecf915952febb04b650
126edae08c8bf522c50524c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://489eff9110c6e48b0e3119edf09adf0a2a48fc1fdc1bb8e81f113ab882725eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:07Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.351247 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:08:07 crc kubenswrapper[4728]: E1205 11:08:07.351381 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.351542 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:07Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.362627 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-etc-openvswitch\") pod \"ovnkube-node-wchlf\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.362673 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-log-socket\") pod \"ovnkube-node-wchlf\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.362698 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-wchlf\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.362720 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-run-openvswitch\") pod \"ovnkube-node-wchlf\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.362737 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-node-log\") pod \"ovnkube-node-wchlf\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.362739 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-etc-openvswitch\") pod \"ovnkube-node-wchlf\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.362759 4728 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-ovn-node-metrics-cert\") pod \"ovnkube-node-wchlf\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.362805 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-host-kubelet\") pod \"ovnkube-node-wchlf\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.362822 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-host-run-ovn-kubernetes\") pod \"ovnkube-node-wchlf\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.362822 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-log-socket\") pod \"ovnkube-node-wchlf\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.362862 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-host-cni-bin\") pod \"ovnkube-node-wchlf\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.362841 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-host-cni-bin\") pod \"ovnkube-node-wchlf\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.362889 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-host-kubelet\") pod \"ovnkube-node-wchlf\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.362906 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-env-overrides\") pod \"ovnkube-node-wchlf\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.362955 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-run-openvswitch\") pod \"ovnkube-node-wchlf\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.362964 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: 
\"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-host-run-netns\") pod \"ovnkube-node-wchlf\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.362978 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-wchlf\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.362981 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-run-systemd\") pod \"ovnkube-node-wchlf\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.362999 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-run-systemd\") pod \"ovnkube-node-wchlf\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.363004 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-host-cni-netd\") pod \"ovnkube-node-wchlf\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.363030 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nv6h7\" (UniqueName: \"kubernetes.io/projected/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-kube-api-access-nv6h7\") pod \"ovnkube-node-wchlf\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.363046 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-var-lib-openvswitch\") pod \"ovnkube-node-wchlf\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.363120 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-systemd-units\") pod \"ovnkube-node-wchlf\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.363139 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-run-ovn\") pod \"ovnkube-node-wchlf\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.363178 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: 
\"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-host-slash\") pod \"ovnkube-node-wchlf\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.363202 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-ovnkube-config\") pod \"ovnkube-node-wchlf\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.363241 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-ovnkube-script-lib\") pod \"ovnkube-node-wchlf\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.363490 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-node-log\") pod \"ovnkube-node-wchlf\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.363515 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-env-overrides\") pod \"ovnkube-node-wchlf\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.362910 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-host-run-ovn-kubernetes\") pod \"ovnkube-node-wchlf\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.363563 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-host-run-netns\") pod \"ovnkube-node-wchlf\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.363592 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-systemd-units\") pod \"ovnkube-node-wchlf\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.363622 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-host-cni-netd\") pod \"ovnkube-node-wchlf\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.363878 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-var-lib-openvswitch\") pod \"ovnkube-node-wchlf\" (UID: 
\"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.363898 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-host-slash\") pod \"ovnkube-node-wchlf\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.363942 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-run-ovn\") pod \"ovnkube-node-wchlf\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.364021 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-ovnkube-script-lib\") pod \"ovnkube-node-wchlf\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.364277 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-ovnkube-config\") pod \"ovnkube-node-wchlf\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.364869 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:07Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.365542 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-ovn-node-metrics-cert\") pod \"ovnkube-node-wchlf\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.386473 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:07Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.387111 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nv6h7\" (UniqueName: \"kubernetes.io/projected/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-kube-api-access-nv6h7\") pod \"ovnkube-node-wchlf\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") " pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.409861 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6b1dbb0-8a99-4b3b-870e-771cdaac1bac\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://adb81cae56872a55c5781abe457653330ce2284251d64366ece5f2f05055b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ab41915fb3de0296bc2ba49fe39835620c1ed216790add23ccc2c23ac718c2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://920da764fdc49312fdf906f31a1280028d5762b4b7af2b3806c9488e3dfa9d71\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd145f7ac78fc008f9b9b24a11f51d6afc11e3cfa047b8d178d11507ddde8e77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b05c55e24f53fc91a2fd2b2a34b000751799933c3cb56da1ea666a6bfb6ba13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T11:08:05Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 11:07:59.827917 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 11:07:59.829563 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2883091425/tls.crt::/tmp/serving-cert-2883091425/tls.key\\\\\\\"\\\\nI1205 11:08:05.319025 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 11:08:05.321746 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 11:08:05.321774 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 11:08:05.321847 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 11:08:05.321860 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 11:08:05.327281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 11:08:05.327310 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 11:08:05.327326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 11:08:05.327329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 11:08:05.327332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 11:08:05.327493 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 11:08:05.328771 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af8363ba45c19a07e7d11064c2e321ec28b939288d01eb82d8bd9e1ee6b93998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:07Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.419488 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.450912 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:07Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.488829 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gf8np" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f292da29-a632-47aa-8bcc-2d999eaa6c11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dxmch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gf8np\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:07Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:07 crc kubenswrapper[4728]: W1205 11:08:07.491822 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1c5fa466_a6b6_4c17_b4b7_aff5b6311cc5.slice/crio-9e08482aa0ee5a9538ff22bcfff97d286de234fff549b63fca921a612fb714aa WatchSource:0}: Error finding container 9e08482aa0ee5a9538ff22bcfff97d286de234fff549b63fca921a612fb714aa: Status 404 returned error can't find the container with id 9e08482aa0ee5a9538ff22bcfff97d286de234fff549b63fca921a612fb714aa Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.503672 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"90beec388d3137d02789bb6be800fbb0b1678f8799f133d1e48b1f636f70e8c9"} Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.503717 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"780e71361f1592490b1bbc77b9a869204d3a20319d1bd6c8330d3b0fc81d197f"} Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.505672 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"ec1c7fc1e4ad2be5a7dfb1eeaa3def2a68d041e302643e875869f6c657c5f6d3"} Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.506926 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gf8np" event={"ID":"f292da29-a632-47aa-8bcc-2d999eaa6c11","Type":"ContainerStarted","Data":"6d525f104a98170107ecc1ae96c4f5c5ff3dda2f976336c219e5a9498725380e"} Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.506955 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gf8np" event={"ID":"f292da29-a632-47aa-8bcc-2d999eaa6c11","Type":"ContainerStarted","Data":"35e74502f05ced7ad0b451ffdd20bed41fb3c958bbab6d2ef214b4a20ccec2a1"} Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.508868 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-85f5z" event={"ID":"f536e7a4-ad53-442e-b7c3-8928fcd89f22","Type":"ContainerStarted","Data":"180e166e57042a6843516cf424020e8aab91131f7e0cc4c6f061de5e46bb0f8c"} Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.510759 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" event={"ID":"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5","Type":"ContainerStarted","Data":"9e08482aa0ee5a9538ff22bcfff97d286de234fff549b63fca921a612fb714aa"} Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.529255 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bfa60b-fcb6-4519-abc5-c25fea50921d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w8qlp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:07Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:07 crc kubenswrapper[4728]: E1205 11:08:07.543760 4728 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-apiserver-crc\" already exists" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.580015 4728 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.587613 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/95bfa60b-fcb6-4519-abc5-c25fea50921d-mcd-auth-proxy-config\") pod \"machine-config-daemon-w8qlp\" (UID: \"95bfa60b-fcb6-4519-abc5-c25fea50921d\") " pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.609811 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8pwbb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e18c7d32-4ecb-4931-931e-56a7898cb233\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8pwbb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:07Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.647213 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-85f5z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f536e7a4-ad53-442e-b7c3-8928fcd89f22\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r72s7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-85f5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:07Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.687094 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:07Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.727540 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:07Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.766284 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.766382 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.766421 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:08:07 crc kubenswrapper[4728]: E1205 11:08:07.766518 4728 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 11:08:07 crc kubenswrapper[4728]: E1205 11:08:07.766570 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 11:08:09.766556244 +0000 UTC m=+23.908678937 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 11:08:07 crc kubenswrapper[4728]: E1205 11:08:07.766630 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:08:09.766620646 +0000 UTC m=+23.908743339 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:08:07 crc kubenswrapper[4728]: E1205 11:08:07.766698 4728 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 11:08:07 crc kubenswrapper[4728]: E1205 11:08:07.766730 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 11:08:09.766721648 +0000 UTC m=+23.908844341 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.775741 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wchlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:07Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.811345 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8pwbb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e18c7d32-4ecb-4931-931e-56a7898cb233\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8pwbb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:07Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.819736 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.826320 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/e18c7d32-4ecb-4931-931e-56a7898cb233-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-8pwbb\" (UID: \"e18c7d32-4ecb-4931-931e-56a7898cb233\") " pod="openshift-multus/multus-additional-cni-plugins-8pwbb" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.867273 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.867347 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:08:07 crc kubenswrapper[4728]: E1205 11:08:07.867453 4728 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 11:08:07 crc kubenswrapper[4728]: E1205 11:08:07.867468 4728 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 11:08:07 crc kubenswrapper[4728]: E1205 11:08:07.867479 4728 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 11:08:07 crc kubenswrapper[4728]: E1205 11:08:07.867495 4728 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 11:08:07 crc kubenswrapper[4728]: E1205 11:08:07.867532 4728 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object 
"openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 11:08:07 crc kubenswrapper[4728]: E1205 11:08:07.867546 4728 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 11:08:07 crc kubenswrapper[4728]: E1205 11:08:07.867518 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 11:08:09.867505349 +0000 UTC m=+24.009628042 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 11:08:07 crc kubenswrapper[4728]: E1205 11:08:07.867638 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 11:08:09.867613221 +0000 UTC m=+24.009735964 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.876469 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6b1dbb0-8a99-4b3b-870e-771cdaac1bac\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://adb81cae56872a55c5781abe457653330ce2284251d64366ece5f2f05055b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ab41915fb3de0296bc2ba49fe39835620c1ed216790add23ccc2c23ac718c2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://920da764fdc49312fdf906f31a1280028d5762b4b7af2b3806c9488e3dfa9d71\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd145f7ac78fc008f9b9b24a11f51d6afc11e3cfa047b8d178d11507ddde8e77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b05c55e24f53fc91a2fd2b2a34b000751799933c3cb56da1ea666a6bfb6ba13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T11:08:05Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 11:07:59.827917 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 11:07:59.829563 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2883091425/tls.crt::/tmp/serving-cert-2883091425/tls.key\\\\\\\"\\\\nI1205 11:08:05.319025 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 11:08:05.321746 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 11:08:05.321774 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 11:08:05.321847 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 11:08:05.321860 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 11:08:05.327281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 11:08:05.327310 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 11:08:05.327326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 11:08:05.327329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 11:08:05.327332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 11:08:05.327493 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 11:08:05.328771 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af8363ba45c19a07e7d11064c2e321ec28b939288d01eb82d8bd9e1ee6b93998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:07Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.906206 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:07Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.949404 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gf8np" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f292da29-a632-47aa-8bcc-2d999eaa6c11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d525f104a98170107ecc1ae96c4f5c5ff3dda2f976336c219e5a9498725380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dxmch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gf8np\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:07Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:07 crc kubenswrapper[4728]: E1205 11:08:07.956550 4728 secret.go:188] Couldn't get secret openshift-machine-config-operator/proxy-tls: failed to sync secret cache: timed out waiting for the condition Dec 05 11:08:07 crc kubenswrapper[4728]: E1205 11:08:07.956818 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/95bfa60b-fcb6-4519-abc5-c25fea50921d-proxy-tls podName:95bfa60b-fcb6-4519-abc5-c25fea50921d nodeName:}" failed. No retries permitted until 2025-12-05 11:08:08.456776421 +0000 UTC m=+22.598899114 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "proxy-tls" (UniqueName: "kubernetes.io/secret/95bfa60b-fcb6-4519-abc5-c25fea50921d-proxy-tls") pod "machine-config-daemon-w8qlp" (UID: "95bfa60b-fcb6-4519-abc5-c25fea50921d") : failed to sync secret cache: timed out waiting for the condition Dec 05 11:08:07 crc kubenswrapper[4728]: E1205 11:08:07.969338 4728 projected.go:288] Couldn't get configMap openshift-machine-config-operator/kube-root-ca.crt: failed to sync configmap cache: timed out waiting for the condition Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.987602 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bfa60b-fcb6-4519-abc5-c25fea50921d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w8qlp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:07Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:07 crc kubenswrapper[4728]: I1205 11:08:07.999709 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 05 11:08:08 crc kubenswrapper[4728]: I1205 11:08:08.088874 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd 
nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"moun
tPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\
\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wchlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:08Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:08 crc 
kubenswrapper[4728]: I1205 11:08:08.103431 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-85f5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f536e7a4-ad53-442e-b7c3-8928fcd89f22\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://180e166e57042a6843516cf424020e8aab91131f7e0cc4c6f061de5e46bb0f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r72s7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-85f5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:08Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:08 crc kubenswrapper[4728]: I1205 11:08:08.129642 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:08Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:08 crc kubenswrapper[4728]: I1205 11:08:08.168156 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec1c7fc1e4ad2be5a7dfb1eeaa3def2a68d041e302643e875869f6c657c5f6d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:08Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:08 crc kubenswrapper[4728]: I1205 11:08:08.214065 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"600a7698-d6af-4d3a-997c-2af7492b676c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4e303bada68d0a42afcecbe481dd6199dd2e545598d65614cee8ce1097fa3da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e0e4eacda19e748d0d3f8e1d76b38c186c2df0f81a171387de0d9016a76c719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c6351e5a4a9441b43ba3145aa818599799ad7fcdd6bf62b5d0ec08eb3a9d5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c60e02416d658dfb8deecf915952febb04b650
126edae08c8bf522c50524c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://489eff9110c6e48b0e3119edf09adf0a2a48fc1fdc1bb8e81f113ab882725eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:08Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:08 crc kubenswrapper[4728]: I1205 11:08:08.220249 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 05 11:08:08 crc kubenswrapper[4728]: I1205 11:08:08.259482 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 05 11:08:08 crc kubenswrapper[4728]: I1205 11:08:08.287464 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90beec388d3137d02789bb6be800fbb0b1678f8799f133d1e48b1f636f70e8c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://780e71361f1592490b1bbc77b9a869204d3a20319d1bd6c8330d3b0fc81d197f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:08Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:08 crc kubenswrapper[4728]: I1205 11:08:08.299459 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 05 11:08:08 crc kubenswrapper[4728]: I1205 11:08:08.303822 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-8pwbb" Dec 05 11:08:08 crc kubenswrapper[4728]: W1205 11:08:08.317668 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode18c7d32_4ecb_4931_931e_56a7898cb233.slice/crio-51e7f9ad238ae00cfbd8d5bf6f3bb1a01841bed4dd751cccf4dbb19b6cd563d4 WatchSource:0}: Error finding container 51e7f9ad238ae00cfbd8d5bf6f3bb1a01841bed4dd751cccf4dbb19b6cd563d4: Status 404 returned error can't find the container with id 51e7f9ad238ae00cfbd8d5bf6f3bb1a01841bed4dd751cccf4dbb19b6cd563d4 Dec 05 11:08:08 crc kubenswrapper[4728]: I1205 11:08:08.323568 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 05 11:08:08 crc kubenswrapper[4728]: E1205 11:08:08.330382 4728 projected.go:194] Error preparing data for projected volume kube-api-access-6r7pf for pod openshift-machine-config-operator/machine-config-daemon-w8qlp: failed to sync configmap cache: timed out waiting for the condition Dec 05 11:08:08 crc kubenswrapper[4728]: E1205 11:08:08.330494 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/95bfa60b-fcb6-4519-abc5-c25fea50921d-kube-api-access-6r7pf podName:95bfa60b-fcb6-4519-abc5-c25fea50921d nodeName:}" failed. No retries permitted until 2025-12-05 11:08:08.8304597 +0000 UTC m=+22.972582433 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-6r7pf" (UniqueName: "kubernetes.io/projected/95bfa60b-fcb6-4519-abc5-c25fea50921d-kube-api-access-6r7pf") pod "machine-config-daemon-w8qlp" (UID: "95bfa60b-fcb6-4519-abc5-c25fea50921d") : failed to sync configmap cache: timed out waiting for the condition Dec 05 11:08:08 crc kubenswrapper[4728]: I1205 11:08:08.351200 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:08:08 crc kubenswrapper[4728]: I1205 11:08:08.351232 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:08:08 crc kubenswrapper[4728]: E1205 11:08:08.351312 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 11:08:08 crc kubenswrapper[4728]: E1205 11:08:08.351386 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 11:08:08 crc kubenswrapper[4728]: I1205 11:08:08.370407 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:08Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:08 crc kubenswrapper[4728]: I1205 11:08:08.410488 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:08Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:08 crc kubenswrapper[4728]: I1205 11:08:08.472473 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/95bfa60b-fcb6-4519-abc5-c25fea50921d-proxy-tls\") pod \"machine-config-daemon-w8qlp\" (UID: \"95bfa60b-fcb6-4519-abc5-c25fea50921d\") " pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" Dec 05 11:08:08 crc kubenswrapper[4728]: I1205 11:08:08.475778 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/95bfa60b-fcb6-4519-abc5-c25fea50921d-proxy-tls\") pod \"machine-config-daemon-w8qlp\" (UID: \"95bfa60b-fcb6-4519-abc5-c25fea50921d\") " pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" Dec 05 11:08:08 crc kubenswrapper[4728]: I1205 11:08:08.515156 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-8pwbb" event={"ID":"e18c7d32-4ecb-4931-931e-56a7898cb233","Type":"ContainerStarted","Data":"51e7f9ad238ae00cfbd8d5bf6f3bb1a01841bed4dd751cccf4dbb19b6cd563d4"} Dec 05 11:08:08 crc kubenswrapper[4728]: I1205 11:08:08.517008 4728 generic.go:334] "Generic (PLEG): container finished" podID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" containerID="d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079" exitCode=0 Dec 05 11:08:08 crc kubenswrapper[4728]: I1205 11:08:08.517078 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" event={"ID":"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5","Type":"ContainerDied","Data":"d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079"} Dec 05 11:08:08 crc kubenswrapper[4728]: I1205 11:08:08.530001 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6b1dbb0-8a99-4b3b-870e-771cdaac1bac\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://adb81cae56872a55c5781abe457653330ce2284251d64366ece5f2f05055b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ab41915fb3de0296bc2ba49fe39835620c1ed216790add23ccc2c23ac718c2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://920da764fdc49312fdf906f31a1280028d5762b4b7af2b3806c9488e3dfa9d71\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd145f7ac78fc008f9b9b24a11f51d6afc11e3cfa047b8d178d11507ddde8e77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b05c55e24f53fc91a2fd2b2a34b000751799933c3cb56da1ea666a6bfb6ba13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T11:08:05Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 11:07:59.827917 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 11:07:59.829563 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2883091425/tls.crt::/tmp/serving-cert-2883091425/tls.key\\\\\\\"\\\\nI1205 11:08:05.319025 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 11:08:05.321746 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 11:08:05.321774 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 11:08:05.321847 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 11:08:05.321860 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 11:08:05.327281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 11:08:05.327310 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 11:08:05.327326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 11:08:05.327329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 11:08:05.327332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 11:08:05.327493 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 11:08:05.328771 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af8363ba45c19a07e7d11064c2e321ec28b939288d01eb82d8bd9e1ee6b93998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:08Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:08 crc kubenswrapper[4728]: I1205 11:08:08.544716 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:08Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:08 crc kubenswrapper[4728]: I1205 11:08:08.555976 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gf8np" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f292da29-a632-47aa-8bcc-2d999eaa6c11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d525f104a98170107ecc1ae96c4f5c5ff3dda2f976336c219e5a9498725380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dxmch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gf8np\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:08Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:08 crc kubenswrapper[4728]: I1205 11:08:08.566054 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bfa60b-fcb6-4519-abc5-c25fea50921d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w8qlp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:08Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:08 crc kubenswrapper[4728]: I1205 11:08:08.610371 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8pwbb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e18c7d32-4ecb-4931-931e-56a7898cb233\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec
8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"
quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8pwbb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:08Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:08 crc kubenswrapper[4728]: I1205 11:08:08.646054 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-85f5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f536e7a4-ad53-442e-b7c3-8928fcd89f22\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://180e166e57042a6843516cf424020e8aab91131f7e0cc4c6f061de5e46bb0f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r72s7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod 
\"openshift-dns\"/\"node-resolver-85f5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:08Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:08 crc kubenswrapper[4728]: I1205 11:08:08.687983 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:08Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:08 crc kubenswrapper[4728]: I1205 11:08:08.727917 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec1c7fc1e4ad2be5a7dfb1eeaa3def2a68d041e302643e875869f6c657c5f6d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:08Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:08 crc kubenswrapper[4728]: I1205 11:08:08.772320 4728 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkub
e-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574
53265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\
\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wchlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:08Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:08 crc kubenswrapper[4728]: I1205 11:08:08.816751 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"600a7698-d6af-4d3a-997c-2af7492b676c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4e303bada68d0a42afcecbe481dd6199dd2e545598d65614cee8ce1097fa3da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e0e4eacda19e748d0d3f8e1d76b38c186c2df0f81a171387de0d9016a76c719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441
ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c6351e5a4a9441b43ba3145aa818599799ad7fcdd6bf62b5d0ec08eb3a9d5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c60e02416d658dfb8deecf915952febb04b650126edae08c8bf522c50524c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://489eff9110c6e48b0e3119edf09adf0a2a48fc1fdc1bb8e81f113ab882725eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"im
ageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:08Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:08 crc kubenswrapper[4728]: I1205 11:08:08.850172 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90beec388d3137d02789bb6be800fbb0b1678f8799f133d1e48b1f636f70e8c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://780e71361f1592490b1bbc77b9a869204d3a20319d1bd6c8330d3b0fc81d197f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:08Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:08 crc kubenswrapper[4728]: I1205 11:08:08.876401 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6r7pf\" (UniqueName: \"kubernetes.io/projected/95bfa60b-fcb6-4519-abc5-c25fea50921d-kube-api-access-6r7pf\") pod \"machine-config-daemon-w8qlp\" (UID: \"95bfa60b-fcb6-4519-abc5-c25fea50921d\") " pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" Dec 05 11:08:08 crc kubenswrapper[4728]: I1205 
11:08:08.880965 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6r7pf\" (UniqueName: \"kubernetes.io/projected/95bfa60b-fcb6-4519-abc5-c25fea50921d-kube-api-access-6r7pf\") pod \"machine-config-daemon-w8qlp\" (UID: \"95bfa60b-fcb6-4519-abc5-c25fea50921d\") " pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" Dec 05 11:08:08 crc kubenswrapper[4728]: I1205 11:08:08.888133 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:08Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:08 crc kubenswrapper[4728]: I1205 11:08:08.928530 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:08Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.164097 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.179122 4728 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.180966 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.181061 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.181091 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.181350 4728 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.192163 4728 kubelet_node_status.go:115] "Node was previously registered" node="crc" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.192552 4728 kubelet_node_status.go:79] "Successfully registered node" node="crc" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.194237 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.194285 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.194300 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.194323 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.194337 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:09Z","lastTransitionTime":"2025-12-05T11:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:09 crc kubenswrapper[4728]: E1205 11:08:09.216496 4728 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"feb38e4d-326c-4c7a-a272-95e0ac54f009\\\",\\\"systemUUID\\\":\\\"65b68dc7-92a1-4fa1-bbc7-423a936860c6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:09Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.221255 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.221311 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.221328 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.221350 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.221368 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:09Z","lastTransitionTime":"2025-12-05T11:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:09 crc kubenswrapper[4728]: E1205 11:08:09.234564 4728 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"feb38e4d-326c-4c7a-a272-95e0ac54f009\\\",\\\"systemUUID\\\":\\\"65b68dc7-92a1-4fa1-bbc7-423a936860c6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:09Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.237957 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.238008 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.238020 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.238039 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.238051 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:09Z","lastTransitionTime":"2025-12-05T11:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:09 crc kubenswrapper[4728]: E1205 11:08:09.260114 4728 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"feb38e4d-326c-4c7a-a272-95e0ac54f009\\\",\\\"systemUUID\\\":\\\"65b68dc7-92a1-4fa1-bbc7-423a936860c6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:09Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.265679 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.265734 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.265754 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.265780 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.265805 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:09Z","lastTransitionTime":"2025-12-05T11:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:09 crc kubenswrapper[4728]: E1205 11:08:09.281691 4728 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"feb38e4d-326c-4c7a-a272-95e0ac54f009\\\",\\\"systemUUID\\\":\\\"65b68dc7-92a1-4fa1-bbc7-423a936860c6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:09Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.286893 4728 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.292961 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.293033 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.293048 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.293069 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.293084 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:09Z","lastTransitionTime":"2025-12-05T11:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.308777 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-zpkw4"] Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.309217 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-zpkw4" Dec 05 11:08:09 crc kubenswrapper[4728]: E1205 11:08:09.309377 4728 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"feb38e4d-326c-4c7a-a272-95e0ac54f009\\\",\\\"systemUUID\\\":\\\"65b68dc7-92a1-4fa1-bbc7-423a936860c6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:09Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:09 crc kubenswrapper[4728]: E1205 11:08:09.310203 4728 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.311848 4728 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-image-registry"/"kube-root-ca.crt" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.312332 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.312363 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.312411 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.312416 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.312438 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.312474 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.312500 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:09Z","lastTransitionTime":"2025-12-05T11:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.313164 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.326126 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:09Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.338419 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec1c7fc1e4ad2be5a7dfb1eeaa3def2a68d041e302643e875869f6c657c5f6d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:09Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.350951 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:08:09 crc kubenswrapper[4728]: E1205 11:08:09.351070 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.353751 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wchlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:09Z 
is after 2025-08-24T17:21:41Z" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.363057 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zpkw4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ed53a3-7ee5-4d66-9e47-be49a9cd1b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqqjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zpkw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:09Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.371545 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-85f5z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f536e7a4-ad53-442e-b7c3-8928fcd89f22\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://180e166e57042a6843516cf424020e8aab91131f7e0cc4c6f061de5e46bb0f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r72s7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-85f5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:09Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.381105 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:09Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.400836 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"600a7698-d6af-4d3a-997c-2af7492b676c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4e303bada68d0a42afcecbe481dd6199dd2e545598d65614cee8ce1097fa3da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e0e4eacda19e748d0d3f8e1d76b38c186c2df0f81a171387de0d
9016a76c719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c6351e5a4a9441b43ba3145aa818599799ad7fcdd6bf62b5d0ec08eb3a9d5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c60e02416d658dfb8deecf915952febb04b650126edae08c8bf522c50524c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://489eff9110c6e48b0e3119edf09adf0a2a48fc1fdc1bb8e81f113ab882725eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev
/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:09Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.413044 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90beec388d3137d02789bb6be800fbb0b1678f8799f133d1e48b1f636f70e8c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://780e71361f1592490b1bbc77b9a869204d3a20319d1bd6c8330d3b0fc81d197f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:09Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.414520 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.414558 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.414575 4728 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.414595 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.414612 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:09Z","lastTransitionTime":"2025-12-05T11:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.427829 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:09Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:09 crc kubenswrapper[4728]: W1205 11:08:09.444253 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod95bfa60b_fcb6_4519_abc5_c25fea50921d.slice/crio-0ca6e85585fcfdc82d1d048571295b25ab3691077257f14c5be11179f2eadf2d WatchSource:0}: Error finding container 0ca6e85585fcfdc82d1d048571295b25ab3691077257f14c5be11179f2eadf2d: Status 404 returned error can't find the container with id 0ca6e85585fcfdc82d1d048571295b25ab3691077257f14c5be11179f2eadf2d Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.467491 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gf8np" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f292da29-a632-47aa-8bcc-2d999eaa6c11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d525f104a98170107ecc1ae96c4f5c5ff3dda2f976336c219e5a9498725380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-
cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dxmch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gf8np\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:09Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.482900 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sqqjf\" (UniqueName: \"kubernetes.io/projected/27ed53a3-7ee5-4d66-9e47-be49a9cd1b05-kube-api-access-sqqjf\") pod \"node-ca-zpkw4\" (UID: \"27ed53a3-7ee5-4d66-9e47-be49a9cd1b05\") " pod="openshift-image-registry/node-ca-zpkw4" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.482949 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/27ed53a3-7ee5-4d66-9e47-be49a9cd1b05-host\") pod \"node-ca-zpkw4\" (UID: \"27ed53a3-7ee5-4d66-9e47-be49a9cd1b05\") " pod="openshift-image-registry/node-ca-zpkw4" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.482987 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/27ed53a3-7ee5-4d66-9e47-be49a9cd1b05-serviceca\") pod \"node-ca-zpkw4\" (UID: \"27ed53a3-7ee5-4d66-9e47-be49a9cd1b05\") " pod="openshift-image-registry/node-ca-zpkw4" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.506728 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bfa60b-fcb6-4519-abc5-c25fea50921d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w8qlp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:09Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.516464 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.516491 4728 kubelet_node_status.go:724] "Recording event message for 
node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.516499 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.516513 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.516524 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:09Z","lastTransitionTime":"2025-12-05T11:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.522508 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" event={"ID":"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5","Type":"ContainerStarted","Data":"d82bd221438311b8ba34cfc26e43e590743b257ad91407befb8f4d2e17f7ba55"} Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.522554 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" event={"ID":"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5","Type":"ContainerStarted","Data":"62ccfb6861d12cec3082bfe61fc1976dc0889398539797aa62979b632e74c9ff"} Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.522594 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" event={"ID":"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5","Type":"ContainerStarted","Data":"6d488533307095d5c0efc56c8f32bac388226d2d5763c5c31db03c84585899f2"} Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.522608 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" event={"ID":"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5","Type":"ContainerStarted","Data":"02bc0b9bc6ed08d2162af49b31f5e66ef70f78723e4016f6be5905d1227201b7"} Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.523438 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" event={"ID":"95bfa60b-fcb6-4519-abc5-c25fea50921d","Type":"ContainerStarted","Data":"0ca6e85585fcfdc82d1d048571295b25ab3691077257f14c5be11179f2eadf2d"} Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.524593 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"098ed82d50e963f31d1176e06249004a4cc94258aab5dabf6628dead209ae3c1"} Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.525676 4728 generic.go:334] "Generic (PLEG): container finished" podID="e18c7d32-4ecb-4931-931e-56a7898cb233" containerID="dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e" exitCode=0 Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.525709 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-8pwbb" event={"ID":"e18c7d32-4ecb-4931-931e-56a7898cb233","Type":"ContainerDied","Data":"dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e"} Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.550142 4728 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/multus-additional-cni-plugins-8pwbb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e18c7d32-4ecb-4931-931e-56a7898cb233\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\
\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8pwbb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:09Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.584097 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sqqjf\" (UniqueName: \"kubernetes.io/projected/27ed53a3-7ee5-4d66-9e47-be49a9cd1b05-kube-api-access-sqqjf\") pod \"node-ca-zpkw4\" (UID: \"27ed53a3-7ee5-4d66-9e47-be49a9cd1b05\") " pod="openshift-image-registry/node-ca-zpkw4" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.584143 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/27ed53a3-7ee5-4d66-9e47-be49a9cd1b05-host\") pod \"node-ca-zpkw4\" (UID: \"27ed53a3-7ee5-4d66-9e47-be49a9cd1b05\") " pod="openshift-image-registry/node-ca-zpkw4" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.584171 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/27ed53a3-7ee5-4d66-9e47-be49a9cd1b05-serviceca\") pod \"node-ca-zpkw4\" (UID: \"27ed53a3-7ee5-4d66-9e47-be49a9cd1b05\") " pod="openshift-image-registry/node-ca-zpkw4" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.584238 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/27ed53a3-7ee5-4d66-9e47-be49a9cd1b05-host\") pod \"node-ca-zpkw4\" (UID: \"27ed53a3-7ee5-4d66-9e47-be49a9cd1b05\") " pod="openshift-image-registry/node-ca-zpkw4" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.584970 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/27ed53a3-7ee5-4d66-9e47-be49a9cd1b05-serviceca\") pod \"node-ca-zpkw4\" (UID: \"27ed53a3-7ee5-4d66-9e47-be49a9cd1b05\") " pod="openshift-image-registry/node-ca-zpkw4" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.590778 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6b1dbb0-8a99-4b3b-870e-771cdaac1bac\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://adb81cae56872a55c5781abe457653330ce2284251d64366ece5f2f05055b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ab41915fb3de0296bc2ba49fe39835620c1ed216790add23ccc2c23ac718c2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://920da764fdc49312fdf906f31a1280028d5762b4b7af2b3806c9488e3dfa9d71\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd145f7ac78fc008f9b9b24a11f51d6afc11e3cfa047b8d178d11507ddde8e77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://9b05c55e24f53fc91a2fd2b2a34b000751799933c3cb56da1ea666a6bfb6ba13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T11:08:05Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 11:07:59.827917 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 11:07:59.829563 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2883091425/tls.crt::/tmp/serving-cert-2883091425/tls.key\\\\\\\"\\\\nI1205 11:08:05.319025 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 11:08:05.321746 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 11:08:05.321774 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 11:08:05.321847 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 11:08:05.321860 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 11:08:05.327281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 11:08:05.327310 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 11:08:05.327326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 11:08:05.327329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 11:08:05.327332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 11:08:05.327493 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 11:08:05.328771 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af8363ba45c19a07e7d11064c2e321ec28b939288d01eb82d8bd9e1ee6b93998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:09Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.615594 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sqqjf\" (UniqueName: \"kubernetes.io/projected/27ed53a3-7ee5-4d66-9e47-be49a9cd1b05-kube-api-access-sqqjf\") pod \"node-ca-zpkw4\" (UID: \"27ed53a3-7ee5-4d66-9e47-be49a9cd1b05\") " pod="openshift-image-registry/node-ca-zpkw4" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.618841 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.618882 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.618894 4728 kubelet_node_status.go:724] "Recording event message for 
node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.618912 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.618924 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:09Z","lastTransitionTime":"2025-12-05T11:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.626492 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-zpkw4" Dec 05 11:08:09 crc kubenswrapper[4728]: W1205 11:08:09.639590 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod27ed53a3_7ee5_4d66_9e47_be49a9cd1b05.slice/crio-9248b70455a15e9fa6097cc860112f80066ec964c4550765ef40b4a399320996 WatchSource:0}: Error finding container 9248b70455a15e9fa6097cc860112f80066ec964c4550765ef40b4a399320996: Status 404 returned error can't find the container with id 9248b70455a15e9fa6097cc860112f80066ec964c4550765ef40b4a399320996 Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.646759 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:09Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.700320 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://098ed82d50e963f31d1176e06249004a4cc94258aab5dabf6628dead209ae3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:09Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.722340 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.722370 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.722379 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.722393 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.722401 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:09Z","lastTransitionTime":"2025-12-05T11:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.746035 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gf8np" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f292da29-a632-47aa-8bcc-2d999eaa6c11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d525f104a98170107ecc1ae96c4f5c5ff3dda2f976336c219e5a9498725380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dxmch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gf8np\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:09Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.766885 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bfa60b-fcb6-4519-abc5-c25fea50921d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w8qlp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:09Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.785463 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.785606 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:08:09 crc kubenswrapper[4728]: E1205 11:08:09.785630 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:08:13.785601468 +0000 UTC m=+27.927724201 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.785679 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:08:09 crc kubenswrapper[4728]: E1205 11:08:09.785724 4728 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 11:08:09 crc kubenswrapper[4728]: E1205 11:08:09.785778 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 11:08:13.785762312 +0000 UTC m=+27.927885085 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 11:08:09 crc kubenswrapper[4728]: E1205 11:08:09.785857 4728 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 11:08:09 crc kubenswrapper[4728]: E1205 11:08:09.785894 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 11:08:13.785886805 +0000 UTC m=+27.928009618 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.818321 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8pwbb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e18c7d32-4ecb-4931-931e-56a7898cb233\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8pwbb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:09Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:09 crc 
kubenswrapper[4728]: I1205 11:08:09.824395 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.824423 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.824431 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.824444 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.824452 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:09Z","lastTransitionTime":"2025-12-05T11:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.850304 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6b1dbb0-8a99-4b3b-870e-771cdaac1bac\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://adb81cae56872a55c5781abe457653330ce2284251d64366ece5f2f05055b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ab41915fb3de0296bc2ba49fe39835620c1ed216790add23ccc2c23ac718c2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://920da764fdc49312fdf906f31a1280028d5762b4b7af2b3806c9488e3dfa9d71\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd145f7ac78fc008f9b9b24a11f51d6afc11e3cfa047b8d178d11507ddde8e77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b05c55e24f53fc91a2fd2b2a34b000751799933c3cb56da1ea666a6bfb6ba13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T11:08:05Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 11:07:59.827917 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 11:07:59.829563 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2883091425/tls.crt::/tmp/serving-cert-2883091425/tls.key\\\\\\\"\\\\nI1205 11:08:05.319025 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 11:08:05.321746 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 11:08:05.321774 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 11:08:05.321847 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 11:08:05.321860 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 11:08:05.327281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 11:08:05.327310 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 11:08:05.327326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 11:08:05.327329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 11:08:05.327332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 11:08:05.327493 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 11:08:05.328771 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af8363ba45c19a07e7d11064c2e321ec28b939288d01eb82d8bd9e1ee6b93998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:09Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.886487 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-85f5z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f536e7a4-ad53-442e-b7c3-8928fcd89f22\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://180e166e57042a6843516cf424020e8aab91131f7e0cc4c6f061de5e46bb0f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r72s7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-85f5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:09Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.886739 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.886804 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:08:09 crc kubenswrapper[4728]: E1205 11:08:09.886974 4728 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered 
Dec 05 11:08:09 crc kubenswrapper[4728]: E1205 11:08:09.887000 4728 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 11:08:09 crc kubenswrapper[4728]: E1205 11:08:09.887014 4728 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 11:08:09 crc kubenswrapper[4728]: E1205 11:08:09.887061 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 11:08:13.887041575 +0000 UTC m=+28.029164268 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 11:08:09 crc kubenswrapper[4728]: E1205 11:08:09.887066 4728 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 11:08:09 crc kubenswrapper[4728]: E1205 11:08:09.887084 4728 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 11:08:09 crc kubenswrapper[4728]: E1205 11:08:09.887094 4728 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 11:08:09 crc kubenswrapper[4728]: E1205 11:08:09.887132 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 11:08:13.887118237 +0000 UTC m=+28.029240930 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.927087 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.927117 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.927133 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.927147 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.927156 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:09Z","lastTransitionTime":"2025-12-05T11:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.929526 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:09Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:09 crc kubenswrapper[4728]: I1205 11:08:09.969118 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec1c7fc1e4ad2be5a7dfb1eeaa3def2a68d041e302643e875869f6c657c5f6d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:09Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.015863 4728 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkub
e-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574
53265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\
\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wchlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:10Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.029623 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.029658 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.029670 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.029688 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.029699 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:10Z","lastTransitionTime":"2025-12-05T11:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.047510 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zpkw4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ed53a3-7ee5-4d66-9e47-be49a9cd1b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqqjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zpkw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:10Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.088949 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:10Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.129682 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:10Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.132145 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.132179 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.132188 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.132200 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.132209 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:10Z","lastTransitionTime":"2025-12-05T11:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.184377 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"600a7698-d6af-4d3a-997c-2af7492b676c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4e303bada68d0a42afcecbe481dd6199dd2e545598d65614cee8ce1097fa3da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e0e4eacda19e748d0d3f8e1d76b38c186c2df0f81a171387de0d9016a76c719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c6351e5a4a9441b43ba3145aa818599799ad7fcdd6bf62b5d0ec08eb3a9d5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c60e02416d658dfb8deecf915952febb04b650126edae08c8bf522c50524c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://489eff9110c6e48b0e3119edf09adf0a2a48fc1fdc1bb8e81f113ab882725eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:10Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.209911 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90beec388d3137d02789bb6be800fbb0b1678f8799f133d1e48b1f636f70e8c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://780e71361f1592490b1bbc77b9a869204d3a20319d1bd6c8330d3b0fc81d197f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:10Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.235197 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.235435 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.235445 4728 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.235459 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.235469 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:10Z","lastTransitionTime":"2025-12-05T11:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.338378 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.338448 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.338468 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.338494 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.338534 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:10Z","lastTransitionTime":"2025-12-05T11:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.351954 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.352037 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:08:10 crc kubenswrapper[4728]: E1205 11:08:10.352095 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 11:08:10 crc kubenswrapper[4728]: E1205 11:08:10.352187 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.440350 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.440386 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.440394 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.440408 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.440416 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:10Z","lastTransitionTime":"2025-12-05T11:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.532370 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" event={"ID":"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5","Type":"ContainerStarted","Data":"4aacb2f8e3ec748166a587a5c2c7c026382073d9e3b8e20caf4dc444e9cc70c6"} Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.532417 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" event={"ID":"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5","Type":"ContainerStarted","Data":"19041ddafe3e2fe2bb95577c9bc5e372ba443790469deb4f36634d3690a61b0b"} Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.534128 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" event={"ID":"95bfa60b-fcb6-4519-abc5-c25fea50921d","Type":"ContainerStarted","Data":"3d952894ef1ae2b5d6c23b0d1b85d9d2689e062674a2e4ba53719b6c7290bbaa"} Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.534178 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" event={"ID":"95bfa60b-fcb6-4519-abc5-c25fea50921d","Type":"ContainerStarted","Data":"d41cccfccd3b8763566f9a4453832461a74aedcf7bcc18e8c925bb20e1b620de"} Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.535259 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-zpkw4" event={"ID":"27ed53a3-7ee5-4d66-9e47-be49a9cd1b05","Type":"ContainerStarted","Data":"71f9a74961573b2aafe3dce854836f51b027ee6abfb12bc2ff57be6524979165"} Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.535296 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-zpkw4" event={"ID":"27ed53a3-7ee5-4d66-9e47-be49a9cd1b05","Type":"ContainerStarted","Data":"9248b70455a15e9fa6097cc860112f80066ec964c4550765ef40b4a399320996"} Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.538422 4728 generic.go:334] "Generic (PLEG): container finished" podID="e18c7d32-4ecb-4931-931e-56a7898cb233" containerID="d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297" exitCode=0 Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 
11:08:10.538486 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-8pwbb" event={"ID":"e18c7d32-4ecb-4931-931e-56a7898cb233","Type":"ContainerDied","Data":"d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297"} Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.548037 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.548072 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.548080 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.548095 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.548105 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:10Z","lastTransitionTime":"2025-12-05T11:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.552193 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8pwbb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e18c7d32-4ecb-4931-931e-56a7898cb233\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8pwbb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:10Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:10 crc 
kubenswrapper[4728]: I1205 11:08:10.566331 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6b1dbb0-8a99-4b3b-870e-771cdaac1bac\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://adb81cae56872a55c5781abe457653330ce2284251d64366ece5f2f05055b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ab41915fb3de0296bc2ba49fe39835620c1ed216790add23ccc2c23ac718c2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://920da764fdc49312fdf906f31a1280028d5762b4b7af2b3806c9488e3dfa9d71\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"runnin
g\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd145f7ac78fc008f9b9b24a11f51d6afc11e3cfa047b8d178d11507ddde8e77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b05c55e24f53fc91a2fd2b2a34b000751799933c3cb56da1ea666a6bfb6ba13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T11:08:05Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 11:07:59.827917 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 11:07:59.829563 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2883091425/tls.crt::/tmp/serving-cert-2883091425/tls.key\\\\\\\"\\\\nI1205 11:08:05.319025 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 11:08:05.321746 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 11:08:05.321774 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 11:08:05.321847 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 11:08:05.321860 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 11:08:05.327281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 11:08:05.327310 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 11:08:05.327326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 11:08:05.327329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 11:08:05.327332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 11:08:05.327493 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 11:08:05.328771 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af8363ba45c19a07e7d11064c2e321ec28b939288d01eb82d8bd9e1ee6b93998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:10Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.577159 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://098ed82d50e963f31d1176e06249004a4cc94258aab5dabf6628dead209ae3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:10Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.590880 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gf8np" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f292da29-a632-47aa-8bcc-2d999eaa6c11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d525f104a98170107ecc1ae96c4f5c5ff3dda2f976336c219e5a9498725380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dxmch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gf8np\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:10Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.603530 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bfa60b-fcb6-4519-abc5-c25fea50921d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d952894ef1ae2b5d6c23b0d1b85d9d2689e062674a2e4ba53719b6c7290bbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41cccfccd3b8763566f9a4453832461a74aedcf7bcc18e8c925bb20e1b620de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-w8qlp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:10Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.619487 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wchlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:10Z 
is after 2025-08-24T17:21:41Z" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.631447 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zpkw4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ed53a3-7ee5-4d66-9e47-be49a9cd1b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqqjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zpkw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:10Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.642993 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-85f5z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f536e7a4-ad53-442e-b7c3-8928fcd89f22\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://180e166e57042a6843516cf424020e8aab91131f7e0cc4c6f061de5e46bb0f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r72s7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-85f5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:10Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.651263 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.651466 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.651559 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.651641 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.651698 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:10Z","lastTransitionTime":"2025-12-05T11:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.658127 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:10Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.671969 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec1c7fc1e4ad2be5a7dfb1eeaa3def2a68d041e302643e875869f6c657c5f6d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:10Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.692436 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"600a7698-d6af-4d3a-997c-2af7492b676c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4e303bada68d0a42afcecbe481dd6199dd2e545598d65614cee8ce1097fa3da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e0e4eacda19e748d0d3f8e1d76b38c186c2df0f81a171387de0d9016a76c719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c6351e5a4a9441b43ba3145aa818599799ad7fcdd6bf62b5d0ec08eb3a9d5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c60e02416d658dfb8deecf915952febb04b650
126edae08c8bf522c50524c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://489eff9110c6e48b0e3119edf09adf0a2a48fc1fdc1bb8e81f113ab882725eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:10Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.707467 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90beec388d3137d02789bb6be800fbb0b1678f8799f133d1e48b1f636f70e8c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://780e71361f1592490b1bbc77b9a869204d3a20319d1bd6c8330d3b0fc81d197f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:10Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.728434 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:10Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.755563 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.755596 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.755604 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.755618 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.755627 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:10Z","lastTransitionTime":"2025-12-05T11:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.771031 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:10Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.810466 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6b1dbb0-8a99-4b3b-870e-771cdaac1bac\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://adb81cae56872a55c5781abe457653330ce2284251d64366ece5f2f05055b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ab41915fb3de0296bc2ba49fe39835620c1ed216790add23ccc2c23ac718c2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://920da764fdc49312fdf906f31a1280028d5762b4b7af2b3806c9488e3dfa9d71\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd145f7ac78fc008f9b9b24a11f51d6afc11e3cfa047b8d178d11507ddde8e77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b05c55e24f53fc91a2fd2b2a34b000751799933c3cb56da1ea666a6bfb6ba13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T11:08:05Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 11:07:59.827917 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 11:07:59.829563 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2883091425/tls.crt::/tmp/serving-cert-2883091425/tls.key\\\\\\\"\\\\nI1205 11:08:05.319025 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 11:08:05.321746 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 11:08:05.321774 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 11:08:05.321847 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 11:08:05.321860 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 11:08:05.327281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 11:08:05.327310 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 11:08:05.327326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 11:08:05.327329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 11:08:05.327332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 11:08:05.327493 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 11:08:05.328771 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af8363ba45c19a07e7d11064c2e321ec28b939288d01eb82d8bd9e1ee6b93998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:10Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.847835 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://098ed82d50e963f31d1176e06249004a4cc94258aab5dabf6628dead209ae3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:10Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.857235 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.857267 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.857274 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.857287 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.857295 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:10Z","lastTransitionTime":"2025-12-05T11:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.891098 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gf8np" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f292da29-a632-47aa-8bcc-2d999eaa6c11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d525f104a98170107ecc1ae96c4f5c5ff3dda2f976336c219e5a9498725380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dxmch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gf8np\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:10Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.928902 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bfa60b-fcb6-4519-abc5-c25fea50921d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d952894ef1ae2b5d6c23b0d1b85d9d2689e062674a2e4ba53719b6c7290bbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41cccfccd3b8763566f9a4453832461a74aedcf7bcc18e8c925bb20e1b620de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w8qlp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:10Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.960039 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.960092 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.960104 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.960122 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.960135 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:10Z","lastTransitionTime":"2025-12-05T11:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:10 crc kubenswrapper[4728]: I1205 11:08:10.973756 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8pwbb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e18c7d32-4ecb-4931-931e-56a7898cb233\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-
05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8pwbb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:10Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.007599 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-85f5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f536e7a4-ad53-442e-b7c3-8928fcd89f22\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://180e166e57042a6843516cf424020e8aab91131f7e0cc4c6f061de5e46bb0f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r72s7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-85f5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:11Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.050399 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:11Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.062497 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.062567 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.062585 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.062609 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.062624 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:11Z","lastTransitionTime":"2025-12-05T11:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.088573 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec1c7fc1e4ad2be5a7dfb1eeaa3def2a68d041e302643e875869f6c657c5f6d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:11Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.133157 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:07
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wchlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:11Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.165587 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.165631 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.165641 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.165654 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.165663 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:11Z","lastTransitionTime":"2025-12-05T11:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.168273 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zpkw4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ed53a3-7ee5-4d66-9e47-be49a9cd1b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71f9a74961573b2aafe3dce854836f51b027ee6abfb12bc2ff57be6524979165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\
",\\\"name\\\":\\\"kube-api-access-sqqjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zpkw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:11Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.220534 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"600a7698-d6af-4d3a-997c-2af7492b676c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4e303bada68d0a42afcecbe481dd6199dd2e545598d65614cee8ce1097fa3da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e0e4eacda19e748d0d3f8e1d76b38c186c2df0f81a171387de0d9016a76c719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c6351e5a4a9441b43ba3145aa818599799ad7fcdd6bf62b5d0ec08eb3a9d5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c60e02416d658dfb8deecf915952febb04b650126edae08c8bf522c50524c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://489eff9110c6e48b0e3119edf09adf0a2a48fc1fdc1bb8e81f113ab882725eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",
\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:11Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.250396 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90beec388d3137d02789bb6be800fbb0b1678f8799f133d1e48b1f636f70e8c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://780e71361f1592490b1bbc77b9a869204d3a20319d1bd6c8330d3b0fc81d197f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:11Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.268626 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.268666 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.268680 4728 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.268697 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.268708 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:11Z","lastTransitionTime":"2025-12-05T11:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.292705 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:11Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.333954 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:11Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.351083 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:08:11 crc kubenswrapper[4728]: E1205 11:08:11.351261 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.371081 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.371130 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.371146 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.371166 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.371181 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:11Z","lastTransitionTime":"2025-12-05T11:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.475500 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.475552 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.475569 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.475594 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.475615 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:11Z","lastTransitionTime":"2025-12-05T11:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.543198 4728 generic.go:334] "Generic (PLEG): container finished" podID="e18c7d32-4ecb-4931-931e-56a7898cb233" containerID="457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101" exitCode=0 Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.543322 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-8pwbb" event={"ID":"e18c7d32-4ecb-4931-931e-56a7898cb233","Type":"ContainerDied","Data":"457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101"} Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.556890 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gf8np" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f292da29-a632-47aa-8bcc-2d999eaa6c11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d525f104a98170107ecc1ae96c4f5c5ff3dda2f976336c219e5a9498725380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-conf
ig\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dxmch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gf8np\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:11Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.570717 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bfa60b-fcb6-4519-abc5-c25fea50921d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d952894ef1ae2b5d6c23b0d1b85d9d2689e062674a2e4ba53719b6c7290bbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41cccfccd3b8763566f9a4453832461a74aedcf7bcc18e8c925bb20e1b620de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a
9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w8qlp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:11Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.578320 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.578374 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.578386 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.578404 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.578419 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:11Z","lastTransitionTime":"2025-12-05T11:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.590873 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8pwbb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e18c7d32-4ecb-4931-931e-56a7898cb233\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},
{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8pwbb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:11Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.604454 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6b1dbb0-8a99-4b3b-870e-771cdaac1bac\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://adb81cae56872a55c5781abe457653330ce2284251d64366ece5f2f05055b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ab41915fb3de0296bc2ba49fe39835620c1ed216790add23ccc2c23ac718c2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://920da764fdc49312fdf906f31a1280028d5762b4b7af2b3806c9488e3dfa9d71\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd145f7ac78fc008f9b9b24a11f51d6afc11e3cfa047b8d178d11507ddde8e77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b05c55e24f53fc91a2fd2b2a34b000751799933c3cb56da1ea666a6bfb6ba13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T11:08:05Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 11:07:59.827917 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 11:07:59.829563 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2883091425/tls.crt::/tmp/serving-cert-2883091425/tls.key\\\\\\\"\\\\nI1205 11:08:05.319025 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 11:08:05.321746 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 11:08:05.321774 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 11:08:05.321847 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 11:08:05.321860 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 11:08:05.327281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 11:08:05.327310 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 11:08:05.327326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 11:08:05.327329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 11:08:05.327332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 11:08:05.327493 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 11:08:05.328771 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af8363ba45c19a07e7d11064c2e321ec28b939288d01eb82d8bd9e1ee6b93998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:11Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.619036 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://098ed82d50e963f31d1176e06249004a4cc94258aab5dabf6628dead209ae3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:11Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.633826 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:11Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.649333 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec1c7fc1e4ad2be5a7dfb1eeaa3def2a68d041e302643e875869f6c657c5f6d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:11Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.671654 4728 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkub
e-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574
53265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\
\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wchlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:11Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.680791 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.680842 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.680850 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.680866 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.680875 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:11Z","lastTransitionTime":"2025-12-05T11:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.686601 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zpkw4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ed53a3-7ee5-4d66-9e47-be49a9cd1b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71f9a74961573b2aafe3dce854836f51b027ee6abfb12bc2ff57be6524979165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqqjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zpkw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:11Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.729949 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-85f5z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f536e7a4-ad53-442e-b7c3-8928fcd89f22\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://180e166e57042a6843516cf424020e8aab91131f7e0cc4c6f061de5e46bb0f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r72s7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-85f5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:11Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.767839 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:11Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.783108 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.783149 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.783160 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.783178 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.783190 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:11Z","lastTransitionTime":"2025-12-05T11:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.821411 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"600a7698-d6af-4d3a-997c-2af7492b676c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4e303bada68d0a42afcecbe481dd6199dd2e545598d65614cee8ce1097fa3da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e0e4eacda19e748d0d3f8e1d76b38c186c2df0f81a171387de0d9016a76c719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c6351e5a4a9441b43ba3145aa818599799ad7fcdd6bf62b5d0ec08eb3a9d5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c60e02416d658dfb8deecf915952febb04b650126edae08c8bf522c50524c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://489eff9110c6e48b0e3119edf09adf0a2a48fc1fdc1bb8e81f113ab882725eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:11Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.848103 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90beec388d3137d02789bb6be800fbb0b1678f8799f133d1e48b1f636f70e8c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://780e71361f1592490b1bbc77b9a869204d3a20319d1bd6c8330d3b0fc81d197f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:11Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.886145 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.886176 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.886185 4728 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.886198 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.886210 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:11Z","lastTransitionTime":"2025-12-05T11:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.891128 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:11Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.989373 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.989553 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.989570 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.989587 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:11 crc kubenswrapper[4728]: I1205 11:08:11.989611 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:11Z","lastTransitionTime":"2025-12-05T11:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.091975 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.092028 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.092043 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.092063 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.092078 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:12Z","lastTransitionTime":"2025-12-05T11:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.194885 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.194924 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.194938 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.194956 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.194968 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:12Z","lastTransitionTime":"2025-12-05T11:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.297631 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.297671 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.297683 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.297703 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.297717 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:12Z","lastTransitionTime":"2025-12-05T11:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.351609 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:08:12 crc kubenswrapper[4728]: E1205 11:08:12.351755 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.351875 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:08:12 crc kubenswrapper[4728]: E1205 11:08:12.352132 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.400606 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.400645 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.400659 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.400675 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.400686 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:12Z","lastTransitionTime":"2025-12-05T11:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.425661 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.429521 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.437445 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.444236 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gf8np" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f292da29-a632-47aa-8bcc-2d999eaa6c11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d525f104a98170107ecc1ae96c4f5c5ff3dda2f976336c219e5a9498725380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dxmch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gf8np\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:12Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.459682 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bfa60b-fcb6-4519-abc5-c25fea50921d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d952894ef1ae2b5d6c23b0d1b85d9d2689e062674a2e4ba53719b6c7290bbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41cccfccd3b8763566f9a4453832461a74aedcf7bcc18e8c925bb20e1b620de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-w8qlp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:12Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.474601 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8pwbb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e18c7d32-4ecb-4931-931e-56a7898cb233\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8pwbb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:12Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.490281 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6b1dbb0-8a99-4b3b-870e-771cdaac1bac\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://adb81cae56872a55c5781abe457653330ce2284251d64366ece5f2f05055b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ab41915fb3de0296bc2ba49fe39835620c1ed216790add23ccc2c23ac718c2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://920da764fdc49312fdf906f31a1280028d5762b4b7af2b3806c9488e3dfa9d71\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd145f7ac78fc008f9b9b24a11f51d6afc11e3cfa047b8d178d11507ddde8e77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b05c55e24f53fc91a2fd2b2a34b000751799933c3cb56da1ea666a6bfb6ba13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T11:08:05Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 11:07:59.827917 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 11:07:59.829563 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2883091425/tls.crt::/tmp/serving-cert-2883091425/tls.key\\\\\\\"\\\\nI1205 11:08:05.319025 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 11:08:05.321746 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 11:08:05.321774 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 11:08:05.321847 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 11:08:05.321860 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 11:08:05.327281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 11:08:05.327310 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 11:08:05.327326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 11:08:05.327329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 11:08:05.327332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 11:08:05.327493 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 11:08:05.328771 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af8363ba45c19a07e7d11064c2e321ec28b939288d01eb82d8bd9e1ee6b93998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:12Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.502986 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.503066 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.503089 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.503118 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.503144 4728 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:12Z","lastTransitionTime":"2025-12-05T11:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.504799 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://098ed82d50e963f31d1176e06249004a4cc94258aab5dabf6628dead209ae3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:12Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.521258 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:12Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.535504 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec1c7fc1e4ad2be5a7dfb1eeaa3def2a68d041e302643e875869f6c657c5f6d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:12Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.550362 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" event={"ID":"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5","Type":"ContainerStarted","Data":"b9be8133cc8e8e518cd8a6a3ace93814e8d565d8bea2b81433ba07281ade2f3f"} Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.552749 4728 generic.go:334] "Generic (PLEG): container finished" podID="e18c7d32-4ecb-4931-931e-56a7898cb233" containerID="f79805a42a1054302447bc55dc07df80cf19e651cd1c3e783a4614ff49f9f624" exitCode=0 Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.552826 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-8pwbb" event={"ID":"e18c7d32-4ecb-4931-931e-56a7898cb233","Type":"ContainerDied","Data":"f79805a42a1054302447bc55dc07df80cf19e651cd1c3e783a4614ff49f9f624"} Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.556258 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wchlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:12Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.565337 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zpkw4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ed53a3-7ee5-4d66-9e47-be49a9cd1b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71f9a74961573b2aafe3dce854836f51b027ee6abfb12bc2ff57be6524979165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqqjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\
\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zpkw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:12Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.575620 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-85f5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f536e7a4-ad53-442e-b7c3-8928fcd89f22\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://180e166e57042a6843516cf424020e8aab91131f7e0cc4c6f061de5e46bb0f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r72s7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-85f5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:12Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.585105 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:12Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.602111 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"600a7698-d6af-4d3a-997c-2af7492b676c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4e303bada68d0a42afcecbe481dd6199dd2e545598d65614cee8ce1097fa3da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e0e4eacda19e748d0d3f8e1d76b38c186c2df0f81a171387de0d9016a76c719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c6351e5a4a9441b43ba3145aa818599799ad7fcdd6bf62b5d0ec08eb3a9d5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c60e02416d658dfb8deecf915952febb04b650
126edae08c8bf522c50524c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://489eff9110c6e48b0e3119edf09adf0a2a48fc1fdc1bb8e81f113ab882725eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:12Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.605591 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.605656 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.605669 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.605685 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.605696 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:12Z","lastTransitionTime":"2025-12-05T11:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.615862 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90beec388d3137d02789bb6be800fbb0b1678f8799f133d1e48b1f636f70e8c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://780e71361f1592490b1bbc77b9a869204d3a20319d1bd6c8330d3b0fc81d197f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:12Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.628937 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:12Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.649367 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"600a7698-d6af-4d3a-997c-2af7492b676c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4e303bada68d0a42afcecbe481dd6199dd2e545598d65614cee8ce1097fa3da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e0e4eacda19e748d0d3f8e1d76b38c186c2df0f81a171387de0d9016a76c719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c6351e5a4a9441b43ba3145aa818599799ad7fcdd6bf62b5d0ec08eb3a9d5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c60e02416d658dfb8deecf915952febb04b650
126edae08c8bf522c50524c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://489eff9110c6e48b0e3119edf09adf0a2a48fc1fdc1bb8e81f113ab882725eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:12Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.662297 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90beec388d3137d02789bb6be800fbb0b1678f8799f133d1e48b1f636f70e8c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://780e71361f1592490b1bbc77b9a869204d3a20319d1bd6c8330d3b0fc81d197f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:12Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.674196 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:12Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.686129 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:12Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.698451 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4abe5ae7-e1e1-4771-bb01-4e62b10e074b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe411cb87a46bdaf8208ebd4162ff18f2f9a2617ca43eb635793364573eb3ea4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ca6321022609b3cd60a9e1a534cfd3e69d3f520e5252ed4ffa8f76be4af91e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\
\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ccc55c28cdfadaa191738047a8d7223cf102d79a9af4d225562a87c42cf1c8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1665078c543b335af45e2ef37840dff719d513125a747f8c07dc74ef97e07354\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:12Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.707559 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.707592 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.707605 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.707620 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.707630 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:12Z","lastTransitionTime":"2025-12-05T11:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.711520 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6b1dbb0-8a99-4b3b-870e-771cdaac1bac\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://adb81cae56872a55c5781abe457653330ce2284251d64366ece5f2f05055b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ab41915fb3de0296bc2ba49fe39835620c1ed216790add23ccc2c23ac718c2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://920da764fdc49312fdf906f31a1280028d5762b4b7af2b3806c9488e3dfa9d71\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117e
e1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd145f7ac78fc008f9b9b24a11f51d6afc11e3cfa047b8d178d11507ddde8e77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b05c55e24f53fc91a2fd2b2a34b000751799933c3cb56da1ea666a6bfb6ba13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T11:08:05Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 11:07:59.827917 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 11:07:59.829563 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2883091425/tls.crt::/tmp/serving-cert-2883091425/tls.key\\\\\\\"\\\\nI1205 11:08:05.319025 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 11:08:05.321746 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 11:08:05.321774 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 11:08:05.321847 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 11:08:05.321860 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 11:08:05.327281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 11:08:05.327310 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 11:08:05.327326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 11:08:05.327329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 11:08:05.327332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 11:08:05.327493 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 11:08:05.328771 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af8363ba45c19a07e7d11064c2e321ec28b939288d01eb82d8bd9e1ee6b93998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:12Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.748059 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://098ed82d50e963f31d1176e06249004a4cc94258aab5dabf6628dead209ae3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:12Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.789563 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gf8np" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f292da29-a632-47aa-8bcc-2d999eaa6c11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d525f104a98170107ecc1ae96c4f5c5ff3dda2f976336c219e5a9498725380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dxmch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gf8np\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:12Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.810375 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.810402 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.810410 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.810423 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.810431 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:12Z","lastTransitionTime":"2025-12-05T11:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.834098 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bfa60b-fcb6-4519-abc5-c25fea50921d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d952894ef1ae2b5d6c23b0d1b85d9d2689e062674a2e4ba53719b6c7290bbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41cccfccd3b8763566f9a4453832461a74aedcf7bcc18e8c925bb20e1b620de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w8qlp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:12Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.873026 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8pwbb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e18c7d32-4ecb-4931-931e-56a7898cb233\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f79805a42a1054302447bc55dc07df80cf19e651cd1c3e783a4614ff49f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f79805a42a1054302447bc55dc07df80cf19e651cd1c3e783a4614ff49f9f624\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8pwbb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:12Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.908765 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-85f5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f536e7a4-ad53-442e-b7c3-8928fcd89f22\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://180e166e57042a6843516cf424020e8aab91131f7e0cc4c6f061de5e46bb0f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r72s7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\"
 for pod \"openshift-dns\"/\"node-resolver-85f5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:12Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.913242 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.913291 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.913308 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.913344 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.913355 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:12Z","lastTransitionTime":"2025-12-05T11:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.952075 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:12Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:12 crc kubenswrapper[4728]: I1205 11:08:12.980565 4728 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 05 11:08:13 crc kubenswrapper[4728]: I1205 11:08:13.011495 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec1c7fc1e4ad2be5a7dfb1eeaa3def2a68d041e302643e875869f6c657c5f6d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:13Z is after 
2025-08-24T17:21:41Z" Dec 05 11:08:13 crc kubenswrapper[4728]: I1205 11:08:13.016275 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:13 crc kubenswrapper[4728]: I1205 11:08:13.016327 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:13 crc kubenswrapper[4728]: I1205 11:08:13.016344 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:13 crc kubenswrapper[4728]: I1205 11:08:13.016367 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:13 crc kubenswrapper[4728]: I1205 11:08:13.016384 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:13Z","lastTransitionTime":"2025-12-05T11:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:13 crc kubenswrapper[4728]: I1205 11:08:13.055328 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wchlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:13Z 
is after 2025-08-24T17:21:41Z" Dec 05 11:08:13 crc kubenswrapper[4728]: I1205 11:08:13.088334 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zpkw4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ed53a3-7ee5-4d66-9e47-be49a9cd1b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71f9a74961573b2aafe3dce854836f51b027ee6abfb12bc2ff57be6524979165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqqjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zpkw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:13Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:13 crc kubenswrapper[4728]: I1205 11:08:13.119366 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:13 crc kubenswrapper[4728]: I1205 11:08:13.119425 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:13 crc kubenswrapper[4728]: I1205 11:08:13.119444 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:13 crc kubenswrapper[4728]: I1205 11:08:13.119472 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:13 crc kubenswrapper[4728]: I1205 11:08:13.119492 4728 setters.go:603] "Node became not ready" 
node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:13Z","lastTransitionTime":"2025-12-05T11:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:13 crc kubenswrapper[4728]: I1205 11:08:13.223109 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:13 crc kubenswrapper[4728]: I1205 11:08:13.223172 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:13 crc kubenswrapper[4728]: I1205 11:08:13.223195 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:13 crc kubenswrapper[4728]: I1205 11:08:13.223218 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:13 crc kubenswrapper[4728]: I1205 11:08:13.223232 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:13Z","lastTransitionTime":"2025-12-05T11:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:13 crc kubenswrapper[4728]: I1205 11:08:13.325835 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:13 crc kubenswrapper[4728]: I1205 11:08:13.325899 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:13 crc kubenswrapper[4728]: I1205 11:08:13.325918 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:13 crc kubenswrapper[4728]: I1205 11:08:13.325943 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:13 crc kubenswrapper[4728]: I1205 11:08:13.325962 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:13Z","lastTransitionTime":"2025-12-05T11:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:13 crc kubenswrapper[4728]: I1205 11:08:13.351337 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:08:13 crc kubenswrapper[4728]: E1205 11:08:13.351522 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 11:08:13 crc kubenswrapper[4728]: I1205 11:08:13.429629 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:13 crc kubenswrapper[4728]: I1205 11:08:13.429694 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:13 crc kubenswrapper[4728]: I1205 11:08:13.429711 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:13 crc kubenswrapper[4728]: I1205 11:08:13.429734 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:13 crc kubenswrapper[4728]: I1205 11:08:13.429752 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:13Z","lastTransitionTime":"2025-12-05T11:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:13 crc kubenswrapper[4728]: I1205 11:08:13.531830 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:13 crc kubenswrapper[4728]: I1205 11:08:13.531903 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:13 crc kubenswrapper[4728]: I1205 11:08:13.531930 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:13 crc kubenswrapper[4728]: I1205 11:08:13.531959 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:13 crc kubenswrapper[4728]: I1205 11:08:13.531981 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:13Z","lastTransitionTime":"2025-12-05T11:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:13 crc kubenswrapper[4728]: I1205 11:08:13.786610 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:08:13 crc kubenswrapper[4728]: I1205 11:08:13.786738 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:08:13 crc kubenswrapper[4728]: I1205 11:08:13.786826 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:08:13 crc kubenswrapper[4728]: I1205 11:08:13.786931 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:13 crc kubenswrapper[4728]: E1205 11:08:13.786978 4728 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 11:08:13 crc kubenswrapper[4728]: E1205 11:08:13.787022 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:08:21.78699864 +0000 UTC m=+35.929121333 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:08:13 crc kubenswrapper[4728]: I1205 11:08:13.786993 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:13 crc kubenswrapper[4728]: E1205 11:08:13.787050 4728 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 11:08:13 crc kubenswrapper[4728]: E1205 11:08:13.787071 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 11:08:21.787045171 +0000 UTC m=+35.929167864 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 11:08:13 crc kubenswrapper[4728]: E1205 11:08:13.787127 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 11:08:21.787104793 +0000 UTC m=+35.929227486 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 11:08:13 crc kubenswrapper[4728]: I1205 11:08:13.787067 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:13 crc kubenswrapper[4728]: I1205 11:08:13.787185 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:13 crc kubenswrapper[4728]: I1205 11:08:13.787203 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:13Z","lastTransitionTime":"2025-12-05T11:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:13 crc kubenswrapper[4728]: I1205 11:08:13.888064 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:08:13 crc kubenswrapper[4728]: I1205 11:08:13.888206 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:08:13 crc kubenswrapper[4728]: E1205 11:08:13.888426 4728 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 11:08:13 crc kubenswrapper[4728]: E1205 11:08:13.888472 4728 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 11:08:13 crc kubenswrapper[4728]: E1205 11:08:13.888491 4728 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 11:08:13 crc kubenswrapper[4728]: E1205 11:08:13.888431 4728 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 11:08:13 crc kubenswrapper[4728]: E1205 11:08:13.888639 4728 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 11:08:13 crc kubenswrapper[4728]: E1205 11:08:13.888670 4728 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 11:08:13 crc kubenswrapper[4728]: E1205 11:08:13.888573 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 11:08:21.888544279 +0000 UTC m=+36.030667012 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 11:08:13 crc kubenswrapper[4728]: E1205 11:08:13.888752 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 11:08:21.888732044 +0000 UTC m=+36.030854747 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 11:08:13 crc kubenswrapper[4728]: I1205 11:08:13.890239 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:13 crc kubenswrapper[4728]: I1205 11:08:13.890289 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:13 crc kubenswrapper[4728]: I1205 11:08:13.890308 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:13 crc kubenswrapper[4728]: I1205 11:08:13.890330 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:13 crc kubenswrapper[4728]: I1205 11:08:13.890346 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:13Z","lastTransitionTime":"2025-12-05T11:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:13 crc kubenswrapper[4728]: I1205 11:08:13.993181 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:13 crc kubenswrapper[4728]: I1205 11:08:13.993231 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:13 crc kubenswrapper[4728]: I1205 11:08:13.993241 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:13 crc kubenswrapper[4728]: I1205 11:08:13.993256 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:13 crc kubenswrapper[4728]: I1205 11:08:13.993266 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:13Z","lastTransitionTime":"2025-12-05T11:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:14 crc kubenswrapper[4728]: I1205 11:08:14.095352 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:14 crc kubenswrapper[4728]: I1205 11:08:14.095416 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:14 crc kubenswrapper[4728]: I1205 11:08:14.095429 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:14 crc kubenswrapper[4728]: I1205 11:08:14.095447 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:14 crc kubenswrapper[4728]: I1205 11:08:14.095488 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:14Z","lastTransitionTime":"2025-12-05T11:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:14 crc kubenswrapper[4728]: I1205 11:08:14.198900 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:14 crc kubenswrapper[4728]: I1205 11:08:14.199294 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:14 crc kubenswrapper[4728]: I1205 11:08:14.199313 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:14 crc kubenswrapper[4728]: I1205 11:08:14.199336 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:14 crc kubenswrapper[4728]: I1205 11:08:14.199353 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:14Z","lastTransitionTime":"2025-12-05T11:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:14 crc kubenswrapper[4728]: I1205 11:08:14.301826 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:14 crc kubenswrapper[4728]: I1205 11:08:14.301861 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:14 crc kubenswrapper[4728]: I1205 11:08:14.301873 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:14 crc kubenswrapper[4728]: I1205 11:08:14.301888 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:14 crc kubenswrapper[4728]: I1205 11:08:14.301899 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:14Z","lastTransitionTime":"2025-12-05T11:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:14 crc kubenswrapper[4728]: I1205 11:08:14.351380 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:08:14 crc kubenswrapper[4728]: I1205 11:08:14.351436 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:08:14 crc kubenswrapper[4728]: E1205 11:08:14.351560 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 11:08:14 crc kubenswrapper[4728]: E1205 11:08:14.351703 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 11:08:14 crc kubenswrapper[4728]: I1205 11:08:14.403998 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:14 crc kubenswrapper[4728]: I1205 11:08:14.404091 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:14 crc kubenswrapper[4728]: I1205 11:08:14.404123 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:14 crc kubenswrapper[4728]: I1205 11:08:14.404157 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:14 crc kubenswrapper[4728]: I1205 11:08:14.404183 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:14Z","lastTransitionTime":"2025-12-05T11:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:14 crc kubenswrapper[4728]: I1205 11:08:14.468001 4728 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 05 11:08:14 crc kubenswrapper[4728]: I1205 11:08:14.506705 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:14 crc kubenswrapper[4728]: I1205 11:08:14.506765 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:14 crc kubenswrapper[4728]: I1205 11:08:14.506783 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:14 crc kubenswrapper[4728]: I1205 11:08:14.506885 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:14 crc kubenswrapper[4728]: I1205 11:08:14.506916 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:14Z","lastTransitionTime":"2025-12-05T11:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:14 crc kubenswrapper[4728]: I1205 11:08:14.610558 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:14 crc kubenswrapper[4728]: I1205 11:08:14.610610 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:14 crc kubenswrapper[4728]: I1205 11:08:14.610626 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:14 crc kubenswrapper[4728]: I1205 11:08:14.610647 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:14 crc kubenswrapper[4728]: I1205 11:08:14.610664 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:14Z","lastTransitionTime":"2025-12-05T11:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:14 crc kubenswrapper[4728]: I1205 11:08:14.713514 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:14 crc kubenswrapper[4728]: I1205 11:08:14.713589 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:14 crc kubenswrapper[4728]: I1205 11:08:14.713608 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:14 crc kubenswrapper[4728]: I1205 11:08:14.714075 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:14 crc kubenswrapper[4728]: I1205 11:08:14.714139 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:14Z","lastTransitionTime":"2025-12-05T11:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:14 crc kubenswrapper[4728]: I1205 11:08:14.797825 4728 generic.go:334] "Generic (PLEG): container finished" podID="e18c7d32-4ecb-4931-931e-56a7898cb233" containerID="2b953abf0e1905c143f473ea9d09341d7602168ff1350c317346bef8dda9402f" exitCode=0 Dec 05 11:08:14 crc kubenswrapper[4728]: I1205 11:08:14.797891 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-8pwbb" event={"ID":"e18c7d32-4ecb-4931-931e-56a7898cb233","Type":"ContainerDied","Data":"2b953abf0e1905c143f473ea9d09341d7602168ff1350c317346bef8dda9402f"} Dec 05 11:08:14 crc kubenswrapper[4728]: I1205 11:08:14.816956 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:14 crc kubenswrapper[4728]: I1205 11:08:14.817025 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:14 crc kubenswrapper[4728]: I1205 11:08:14.817045 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:14 crc kubenswrapper[4728]: I1205 11:08:14.817068 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:14 crc kubenswrapper[4728]: I1205 11:08:14.817085 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:14Z","lastTransitionTime":"2025-12-05T11:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:14 crc kubenswrapper[4728]: I1205 11:08:14.834882 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"600a7698-d6af-4d3a-997c-2af7492b676c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4e303bada68d0a42afcecbe481dd6199dd2e545598d65614cee8ce1097fa3da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e0e4eacda19e748d0d3f8e1d76b38c186c2df0f81a171387de0d9016a76c719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c6351e5a4a9441b43ba3145aa818599799ad7fcdd6bf62b5d0ec08eb3a9d5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c60e02416d658dfb8deecf915952febb04b650126edae08c8bf522c50524c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://489eff9110c6e48b0e3119edf09adf0a2a48fc1fdc1bb8e81f113ab882725eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:14Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:14 crc kubenswrapper[4728]: I1205 11:08:14.857610 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90beec388d3137d02789bb6be800fbb0b1678f8799f133d1e48b1f636f70e8c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://780e71361f1592490b1bbc77b9a869204d3a20319d1bd6c8330d3b0fc81d197f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:14Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:14 crc kubenswrapper[4728]: I1205 11:08:14.876213 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:14Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:14 crc kubenswrapper[4728]: I1205 11:08:14.893316 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:14Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:14 crc kubenswrapper[4728]: I1205 11:08:14.917732 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4abe5ae7-e1e1-4771-bb01-4e62b10e074b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe411cb87a46bdaf8208ebd4162ff18f2f9a2617ca43eb635793364573eb3ea4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ca6321022609b3cd60a9e1a534cfd3e69d3f520e5252ed4ffa8f76be4af91e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1
220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ccc55c28cdfadaa191738047a8d7223cf102d79a9af4d225562a87c42cf1c8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1665078c543b335af45e2ef37840dff719d513125a747f8c07dc74ef97e07354\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:14Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:14 crc kubenswrapper[4728]: I1205 11:08:14.919616 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:14 crc kubenswrapper[4728]: I1205 11:08:14.919678 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:14 crc kubenswrapper[4728]: I1205 11:08:14.919701 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 
11:08:14 crc kubenswrapper[4728]: I1205 11:08:14.919730 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:14 crc kubenswrapper[4728]: I1205 11:08:14.919752 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:14Z","lastTransitionTime":"2025-12-05T11:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:14 crc kubenswrapper[4728]: I1205 11:08:14.937361 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6b1dbb0-8a99-4b3b-870e-771cdaac1bac\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://adb81cae56872a55c5781abe457653330ce2284251d64366ece5f2f05055b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ab41915fb3de0296bc2ba49fe39835620c1ed216790add23ccc2c23ac718c2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMoun
ts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://920da764fdc49312fdf906f31a1280028d5762b4b7af2b3806c9488e3dfa9d71\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd145f7ac78fc008f9b9b24a11f51d6afc11e3cfa047b8d178d11507ddde8e77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b05c55e24f53fc91a2fd2b2a34b000751799933c3cb56da1ea666a6bfb6ba13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T11:08:05Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 11:07:59.827917 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 11:07:59.829563 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2883091425/tls.crt::/tmp/serving-cert-2883091425/tls.key\\\\\\\"\\\\nI1205 11:08:05.319025 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 11:08:05.321746 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 11:08:05.321774 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 11:08:05.321847 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 11:08:05.321860 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 11:08:05.327281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 11:08:05.327310 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 11:08:05.327326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 11:08:05.327329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 11:08:05.327332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 11:08:05.327493 1 genericapiserver.go:533] 
MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 11:08:05.328771 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af8363ba45c19a07e7d11064c2e321ec28b939288d01eb82d8bd9e1ee6b93998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:14Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:14 crc kubenswrapper[4728]: I1205 11:08:14.952218 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://098ed82d50e963f31d1176e06249004a4cc94258aab5dabf6628dead209ae3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:14Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:14 crc kubenswrapper[4728]: I1205 11:08:14.968541 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gf8np" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f292da29-a632-47aa-8bcc-2d999eaa6c11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d525f104a98170107ecc1ae96c4f5c5ff3dda2f976336c219e5a9498725380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dxmch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gf8np\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:14Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:14 crc kubenswrapper[4728]: I1205 11:08:14.988708 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bfa60b-fcb6-4519-abc5-c25fea50921d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d952894ef1ae2b5d6c23b0d1b85d9d2689e062674a2e4ba53719b6c7290bbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41cccfccd3b8763566f9a4453832461a74aedcf7bcc18e8c925bb20e1b620de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-w8qlp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:14Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.004209 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8pwbb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e18c7d32-4ecb-4931-931e-56a7898cb233\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f79805a42a1054302447bc55dc07df80cf19e651cd1c3e783a4614ff49f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f79805a42a1054302447bc55dc07df80cf19e651cd1c3e783a4614ff49f9f624\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b953abf0e1905c143f473ea9d09341d7602168ff1350c317346bef8dda9402f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b953abf0e1905c143f473ea9d09341d7602168ff1350c317346bef8dda9402f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8pwbb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:15Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.015310 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-85f5z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f536e7a4-ad53-442e-b7c3-8928fcd89f22\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://180e166e57042a6843516cf424020e8aab91131f7e0cc4c6f061de5e46bb0f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r72s7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-85f5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:15Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.022275 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.022362 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.022375 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.022391 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.022402 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:15Z","lastTransitionTime":"2025-12-05T11:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.027158 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:15Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.041019 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec1c7fc1e4ad2be5a7dfb1eeaa3def2a68d041e302643e875869f6c657c5f6d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:15Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.064276 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wchlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:15Z 
is after 2025-08-24T17:21:41Z" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.074930 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zpkw4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ed53a3-7ee5-4d66-9e47-be49a9cd1b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71f9a74961573b2aafe3dce854836f51b027ee6abfb12bc2ff57be6524979165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqqjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zpkw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:15Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.124675 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.124721 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.124733 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.124750 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.124762 4728 setters.go:603] "Node became not ready" 
node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:15Z","lastTransitionTime":"2025-12-05T11:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.227784 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.227838 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.227847 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.227863 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.227873 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:15Z","lastTransitionTime":"2025-12-05T11:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.330816 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.330866 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.330878 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.330904 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.330918 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:15Z","lastTransitionTime":"2025-12-05T11:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.351190 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:08:15 crc kubenswrapper[4728]: E1205 11:08:15.351348 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.433957 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.434006 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.434019 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.434035 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.434046 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:15Z","lastTransitionTime":"2025-12-05T11:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.536105 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.536141 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.536150 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.536164 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.536176 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:15Z","lastTransitionTime":"2025-12-05T11:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.639278 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.639329 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.639344 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.639363 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.639375 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:15Z","lastTransitionTime":"2025-12-05T11:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.741442 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.741484 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.741496 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.741514 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.741527 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:15Z","lastTransitionTime":"2025-12-05T11:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.804566 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" event={"ID":"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5","Type":"ContainerStarted","Data":"a3211044523f003ebc4affc775f3a904936bda90c1502fc2f9a2b5f2bd7f9155"} Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.804783 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.807672 4728 generic.go:334] "Generic (PLEG): container finished" podID="e18c7d32-4ecb-4931-931e-56a7898cb233" containerID="e7627aff65d1bc4a39148f098ac2e1676d41c980ad3dd2d9494f4bee7b4c0872" exitCode=0 Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.807719 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-8pwbb" event={"ID":"e18c7d32-4ecb-4931-931e-56a7898cb233","Type":"ContainerDied","Data":"e7627aff65d1bc4a39148f098ac2e1676d41c980ad3dd2d9494f4bee7b4c0872"} Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.819834 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec1c7fc1e4ad2be5a7dfb1eeaa3def2a68d041e302643e875869f6c657c5f6d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:15Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.840051 4728 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.840164 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62ccfb6861d12cec3082bfe61fc1976dc0889398539797aa62979b632e74c9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82bd221438311b8ba34cfc26e43e590743b257ad91407befb8f4d2e17f7ba55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly
\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aacb2f8e3ec748166a587a5c2c7c026382073d9e3b8e20caf4dc444e9cc70c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19041ddafe3e2fe2bb95577c9bc5e372ba443790469deb4f36634d3690a61b0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d488533307095d5c0efc56c8f32bac388226d2d5763c5c31db03c84585899f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02bc0b9bc6ed08d2162af49b31f5e66ef70f78723e4016f6be5905d1227201b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3211044523f003ebc4affc775f3a904936bda90c1502fc2f9a2b5f2bd7f9155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"m
ountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9be8133cc8e8e518cd8a6a3ace93814e8d565d8bea2b81433ba07281ade2f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wchlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:15Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.844519 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.844553 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.844564 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.844579 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.844591 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:15Z","lastTransitionTime":"2025-12-05T11:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.850341 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zpkw4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ed53a3-7ee5-4d66-9e47-be49a9cd1b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71f9a74961573b2aafe3dce854836f51b027ee6abfb12bc2ff57be6524979165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqqjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zpkw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T11:08:15Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.862494 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-85f5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f536e7a4-ad53-442e-b7c3-8928fcd89f22\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://180e166e57042a6843516cf424020e8aab91131f7e0cc4c6f061de5e46bb0f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r72s7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-85f5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:15Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.878174 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:15Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.896851 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"600a7698-d6af-4d3a-997c-2af7492b676c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4e303bada68d0a42afcecbe481dd6199dd2e545598d65614cee8ce1097fa3da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e0e4eacda19e748d0d3f8e1d76b38c186c2df0f81a171387de0d9016a76c719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c6351e5a4a9441b43ba3145aa818599799ad7fcdd6bf62b5d0ec08eb3a9d5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c60e02416d658dfb8deecf915952febb04b650
126edae08c8bf522c50524c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://489eff9110c6e48b0e3119edf09adf0a2a48fc1fdc1bb8e81f113ab882725eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:15Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.908956 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90beec388d3137d02789bb6be800fbb0b1678f8799f133d1e48b1f636f70e8c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://780e71361f1592490b1bbc77b9a869204d3a20319d1bd6c8330d3b0fc81d197f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:15Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.922700 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:15Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.939061 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:15Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.950177 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.950208 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.950218 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.950232 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.950243 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:15Z","lastTransitionTime":"2025-12-05T11:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.950872 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bfa60b-fcb6-4519-abc5-c25fea50921d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d952894ef1ae2b5d6c23b0d1b85d9d2689e062674a2e4ba53719b6c7290bbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41cccfccd3b8763566f9a4453832461a74aedcf7bcc18e8c925bb20e1b620de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w8qlp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:15Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.965359 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8pwbb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e18c7d32-4ecb-4931-931e-56a7898cb233\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":
true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f79805a42a1054302447bc55dc07df80cf19e651cd1c3e783a4614ff49f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\
"cri-o://f79805a42a1054302447bc55dc07df80cf19e651cd1c3e783a4614ff49f9f624\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b953abf0e1905c143f473ea9d09341d7602168ff1350c317346bef8dda9402f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b953abf0e1905c143f473ea9d09341d7602168ff1350c317346bef8dda9402f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8pwbb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:15Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.980365 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4abe5ae7-e1e1-4771-bb01-4e62b10e074b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe411cb87a46bdaf8208ebd4162ff18f2f9a2617ca43eb635793364573eb3ea4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ca6321022609b3cd60a9e1a534cfd3e69d3f520e5252ed4ffa8f76be4af91e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ccc55c28cdfadaa191738047a8d7223cf102d79a9af4d225562a87c42cf1c8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1665078c543b335af45e2ef37840dff719d513125a747f8c07dc74ef97e07354\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:15Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:15 crc kubenswrapper[4728]: I1205 11:08:15.996816 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6b1dbb0-8a99-4b3b-870e-771cdaac1bac\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://adb81cae56872a55c5781abe457653330ce2284251d64366ece5f2f05055b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ab41915fb3de0296bc2ba49fe39835620c1ed216790add23ccc2c23ac718c2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://920da764fdc49312fdf906f31a1280028d5762b4b7af2b3806c9488e3dfa9d71\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd145f7ac78fc008f9b9b24a11f51d6afc11e3cfa047b8d178d11507ddde8e77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b05c55e24f53fc91a2fd2b2a34b000751799933c3cb56da1ea666a6bfb6ba13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T11:08:05Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 11:07:59.827917 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 11:07:59.829563 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2883091425/tls.crt::/tmp/serving-cert-2883091425/tls.key\\\\\\\"\\\\nI1205 11:08:05.319025 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 11:08:05.321746 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 11:08:05.321774 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 11:08:05.321847 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 11:08:05.321860 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 11:08:05.327281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 11:08:05.327310 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 11:08:05.327326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 11:08:05.327329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 11:08:05.327332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 11:08:05.327493 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 11:08:05.328771 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af8363ba45c19a07e7d11064c2e321ec28b939288d01eb82d8bd9e1ee6b93998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:15Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.011366 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://098ed82d50e963f31d1176e06249004a4cc94258aab5dabf6628dead209ae3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:16Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.026986 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gf8np" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f292da29-a632-47aa-8bcc-2d999eaa6c11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d525f104a98170107ecc1ae96c4f5c5ff3dda2f976336c219e5a9498725380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dxmch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gf8np\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:16Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.046591 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"600a7698-d6af-4d3a-997c-2af7492b676c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4e303bada68d0a42afcecbe481dd6199dd2e545598d65614cee8ce1097fa3da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e0e4eacda19e748d0d3f8e1d76b38c186c2df0f81a171387de0d9016a76c719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c6351e5a4a9441b43ba3145aa818599799ad7fcdd6bf62b5d0ec08eb3a9d5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c60e02416d658dfb8deecf915952febb04b650126edae08c8bf522c50524c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://489eff9110c6e48b0e3119edf09adf0a2a48fc1fdc1bb8e81f113ab882725eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:16Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.052514 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.052544 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.052554 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.052569 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.052578 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:16Z","lastTransitionTime":"2025-12-05T11:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.058325 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90beec388d3137d02789bb6be800fbb0b1678f8799f133d1e48b1f636f70e8c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://780e71361f1592490b1bbc77b9a869204d3a20319d1bd6c8330d3b0fc81d197f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:16Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.072605 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:16Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.091538 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:16Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.103721 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4abe5ae7-e1e1-4771-bb01-4e62b10e074b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe411cb87a46bdaf8208ebd4162ff18f2f9a2617ca43eb635793364573eb3ea4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ca6321022609b3cd60a9e1a534cfd3e69d3f520e5252ed4ffa8f76be4af91e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1
220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ccc55c28cdfadaa191738047a8d7223cf102d79a9af4d225562a87c42cf1c8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1665078c543b335af45e2ef37840dff719d513125a747f8c07dc74ef97e07354\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:16Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.120022 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6b1dbb0-8a99-4b3b-870e-771cdaac1bac\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://adb81cae56872a55c5781abe457653330ce2284251d64366ece5f2f05055b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ab41915fb3de0296bc2ba49fe39835620c1ed216790add23ccc2c23ac718c2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://920da764fdc49312fdf906f31a1280028d5762b4b7af2b3806c9488e3dfa9d71\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd145f7ac78fc008f9b9b24a11f51d6afc11e3cfa047b8d178d11507ddde8e77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b05c55e24f53fc91a2fd2b2a34b000751799933c3cb56da1ea666a6bfb6ba13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T11:08:05Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 11:07:59.827917 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 11:07:59.829563 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2883091425/tls.crt::/tmp/serving-cert-2883091425/tls.key\\\\\\\"\\\\nI1205 11:08:05.319025 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 11:08:05.321746 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 11:08:05.321774 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 11:08:05.321847 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 11:08:05.321860 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 11:08:05.327281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 11:08:05.327310 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 11:08:05.327326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 11:08:05.327329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 11:08:05.327332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 11:08:05.327493 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 11:08:05.328771 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af8363ba45c19a07e7d11064c2e321ec28b939288d01eb82d8bd9e1ee6b93998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:16Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.131993 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://098ed82d50e963f31d1176e06249004a4cc94258aab5dabf6628dead209ae3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:16Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.146065 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gf8np" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f292da29-a632-47aa-8bcc-2d999eaa6c11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d525f104a98170107ecc1ae96c4f5c5ff3dda2f976336c219e5a9498725380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dxmch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gf8np\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:16Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.155382 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.155417 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.155428 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.155444 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.155456 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:16Z","lastTransitionTime":"2025-12-05T11:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.158188 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bfa60b-fcb6-4519-abc5-c25fea50921d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d952894ef1ae2b5d6c23b0d1b85d9d2689e062674a2e4ba53719b6c7290bbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41cccfccd3b8763566f9a4453832461a74aedcf7bcc18e8c925bb20e1b620de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w8qlp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:16Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.171529 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8pwbb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e18c7d32-4ecb-4931-931e-56a7898cb233\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f79805a42a1054302447bc55dc07df80cf19e651cd1c3e783a4614ff49f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f79805a42a1054302447bc55dc07df80cf19e651cd1c3e783a4614ff49f9f624\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b953abf0e1905c143f473ea9d09341d7602168ff1350c317346bef8dda9402f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b953abf0e1905c143f473ea9d09341d7602168ff1350c317346bef8dda9402f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7627aff65d1bc4a39148f098ac2e1676d41c980ad3dd2d9494f4bee7b4c0872\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7627aff65d1bc4a39148f098ac2e1676d41c980ad3dd2d9494f4bee7b4c0872\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8pwbb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:16Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.182114 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-85f5z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f536e7a4-ad53-442e-b7c3-8928fcd89f22\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://180e166e57042a6843516cf424020e8aab91131f7e0cc4c6f061de5e46bb0f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r72s7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-85f5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:16Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.193852 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:16Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.208572 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec1c7fc1e4ad2be5a7dfb1eeaa3def2a68d041e302643e875869f6c657c5f6d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:16Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.228121 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62ccfb6861d12cec3082bfe61fc1976dc0889398539797aa62979b632e74c9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82bd221438311b8ba34cfc26e43e590743b257ad91407befb8f4d2e17f7ba55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"m
ountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aacb2f8e3ec748166a587a5c2c7c026382073d9e3b8e20caf4dc444e9cc70c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19041ddafe3e2fe2bb95577c9bc5e372ba443790469deb4f36634d3690a61b0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d488533307095d5c0efc56c8f32bac388226d2d5763c5c31db03c84585899f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kub
e-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02bc0b9bc6ed08d2162af49b31f5e66ef70f78723e4016f6be5905d1227201b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3211044523f003ebc4affc775f3a904936bda90c1502fc2f9a2b5f2bd7f9155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\"
,\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9be8133cc8e8e518cd8a6a3ace93814e8d565d8bea2b81433ba07281ade2f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wchlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:16Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 
11:08:16.240674 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zpkw4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ed53a3-7ee5-4d66-9e47-be49a9cd1b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71f9a74961573b2aafe3dce854836f51b027ee6abfb12bc2ff57be6524979165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqqjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zpkw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:16Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.257271 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.257294 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.257305 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.257319 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.257330 4728 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:16Z","lastTransitionTime":"2025-12-05T11:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.352026 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.352157 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:08:16 crc kubenswrapper[4728]: E1205 11:08:16.352214 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 11:08:16 crc kubenswrapper[4728]: E1205 11:08:16.352315 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.359923 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.359970 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.359982 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.360002 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.360014 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:16Z","lastTransitionTime":"2025-12-05T11:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.363737 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-85f5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f536e7a4-ad53-442e-b7c3-8928fcd89f22\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://180e166e57042a6843516cf424020e8aab91131f7e0cc4c6f061de5e46bb0f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r72s7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-85f5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:16Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.380600 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:16Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.397544 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec1c7fc1e4ad2be5a7dfb1eeaa3def2a68d041e302643e875869f6c657c5f6d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:16Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.420656 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62ccfb6861d12cec3082bfe61fc1976dc0889398539797aa62979b632e74c9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82bd221438311b8ba34cfc26e43e590743b257ad91407befb8f4d2e17f7ba55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aacb2f8e3ec748166a587a5c2c7c026382073d9e3b8e20caf4dc444e9cc70c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19041ddafe3e2fe2bb95577c9bc5e372ba443790469deb4f36634d3690a61b0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d488533307095d5c0efc56c8f32bac388226d2d5763c5c31db03c84585899f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02bc0b9bc6ed08d2162af49b31f5e66ef70f78723e4016f6be5905d1227201b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3211044523f003ebc4affc775f3a904936bda90
c1502fc2f9a2b5f2bd7f9155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9be8133cc8e8e518cd8a6a3ace93814e8d565d8bea2b81433ba07281ade2f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wchlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:16Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.435463 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zpkw4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ed53a3-7ee5-4d66-9e47-be49a9cd1b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71f9a74961573b2aafe3dce854836f51b027ee6abfb12bc2ff57be6524979165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqqjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zpkw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:16Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.458363 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"600a7698-d6af-4d3a-997c-2af7492b676c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4e303bada68d0a42afcecbe481dd6199dd2e545598d65614cee8ce1097fa3da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e0e4eacda19e748d0d3f8e1d76b38c186c2df0f81a171387de0d9016a76c719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c6351e5a4a9441b43ba3145aa818599799ad7fcdd6bf62b5d0ec08eb3a9d5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c60e02416d658dfb8deecf915952febb04b650
126edae08c8bf522c50524c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://489eff9110c6e48b0e3119edf09adf0a2a48fc1fdc1bb8e81f113ab882725eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:16Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.461950 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.461977 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.461985 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.461997 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.462009 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:16Z","lastTransitionTime":"2025-12-05T11:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.477543 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90beec388d3137d02789bb6be800fbb0b1678f8799f133d1e48b1f636f70e8c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://780e71361f1592490b1bbc77b9a869204d3a20319d1bd6c8330d3b0fc81d197f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:16Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.491427 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:16Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.506053 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:16Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.523819 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4abe5ae7-e1e1-4771-bb01-4e62b10e074b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe411cb87a46bdaf8208ebd4162ff18f2f9a2617ca43eb635793364573eb3ea4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ca6321022609b3cd60a9e1a534cfd3e69d3f520e5252ed4ffa8f76be4af91e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1
220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ccc55c28cdfadaa191738047a8d7223cf102d79a9af4d225562a87c42cf1c8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1665078c543b335af45e2ef37840dff719d513125a747f8c07dc74ef97e07354\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:16Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.539832 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6b1dbb0-8a99-4b3b-870e-771cdaac1bac\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://adb81cae56872a55c5781abe457653330ce2284251d64366ece5f2f05055b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ab41915fb3de0296bc2ba49fe39835620c1ed216790add23ccc2c23ac718c2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://920da764fdc49312fdf906f31a1280028d5762b4b7af2b3806c9488e3dfa9d71\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd145f7ac78fc008f9b9b24a11f51d6afc11e3cfa047b8d178d11507ddde8e77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b05c55e24f53fc91a2fd2b2a34b000751799933c3cb56da1ea666a6bfb6ba13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T11:08:05Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 11:07:59.827917 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 11:07:59.829563 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2883091425/tls.crt::/tmp/serving-cert-2883091425/tls.key\\\\\\\"\\\\nI1205 11:08:05.319025 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 11:08:05.321746 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 11:08:05.321774 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 11:08:05.321847 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 11:08:05.321860 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 11:08:05.327281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 11:08:05.327310 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 11:08:05.327326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 11:08:05.327329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 11:08:05.327332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 11:08:05.327493 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 11:08:05.328771 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af8363ba45c19a07e7d11064c2e321ec28b939288d01eb82d8bd9e1ee6b93998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:16Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.551781 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://098ed82d50e963f31d1176e06249004a4cc94258aab5dabf6628dead209ae3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:16Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.563294 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gf8np" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f292da29-a632-47aa-8bcc-2d999eaa6c11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d525f104a98170107ecc1ae96c4f5c5ff3dda2f976336c219e5a9498725380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dxmch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gf8np\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:16Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.564761 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.564823 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.564841 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.564861 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.564873 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:16Z","lastTransitionTime":"2025-12-05T11:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.574845 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bfa60b-fcb6-4519-abc5-c25fea50921d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d952894ef1ae2b5d6c23b0d1b85d9d2689e062674a2e4ba53719b6c7290bbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41cccfccd3b8763566f9a4453832461a74aedcf7bcc18e8c925bb20e1b620de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w8qlp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:16Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.587954 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8pwbb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e18c7d32-4ecb-4931-931e-56a7898cb233\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f79805a42a1054302447bc55dc07df80cf19e651cd1c3e783a4614ff49f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f79805a42a1054302447bc55dc07df80cf19e651cd1c3e783a4614ff49f9f624\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b953abf0e1905c143f473ea9d09341d7602168ff1350c317346bef8dda9402f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b953abf0e1905c143f473ea9d09341d7602168ff1350c317346bef8dda9402f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7627aff65d1bc4a39148f098ac2e1676d41c980ad3dd2d9494f4bee7b4c0872\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7627aff65d1bc4a39148f098ac2e1676d41c980ad3dd2d9494f4bee7b4c0872\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8pwbb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:16Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.667637 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.667692 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.667703 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.667722 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.667734 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:16Z","lastTransitionTime":"2025-12-05T11:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.770101 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.770134 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.770144 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.770158 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.770168 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:16Z","lastTransitionTime":"2025-12-05T11:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.814903 4728 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.815179 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-8pwbb" event={"ID":"e18c7d32-4ecb-4931-931e-56a7898cb233","Type":"ContainerStarted","Data":"6d53da0e296e545d13941882c9568c12f967a274b906dafb14c0905f558a155c"} Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.815338 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.825974 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-85f5z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f536e7a4-ad53-442e-b7c3-8928fcd89f22\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://180e166e57042a6843516cf424020e8aab91131f7e0cc4c6f061de5e46bb0f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r72s7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-85f5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:16Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.837066 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.840121 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:16Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.857815 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec1c7fc1e4ad2be5a7dfb1eeaa3def2a68d041e302643e875869f6c657c5f6d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:16Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.873073 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.873114 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.873125 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.873139 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.873150 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:16Z","lastTransitionTime":"2025-12-05T11:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.879038 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62ccfb6861d12cec3082bfe61fc1976dc0889398539797aa62979b632e74c9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82bd221438311b8ba34cfc26e43e590743b257ad91407befb8f4d2e17f7ba55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"cont
ainerID\\\":\\\"cri-o://4aacb2f8e3ec748166a587a5c2c7c026382073d9e3b8e20caf4dc444e9cc70c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19041ddafe3e2fe2bb95577c9bc5e372ba443790469deb4f36634d3690a61b0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d488533307095d5c0efc56c8f32bac388226d2d5763c5c31db03c84585899f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02bc0b9bc6ed08d2162af49b31f5e66ef70f78723e4016f6be5905d1227201b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\"
:\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3211044523f003ebc4affc775f3a904936bda90c1502fc2f9a2b5f2bd7f9155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkub
e-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9be8133cc8e8e518cd8a6a3ace93814e8d565d8bea2b81433ba07281ade2f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wchlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:16Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:16 crc kubenswrapper[4728]: I1205 11:08:16.930982 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zpkw4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ed53a3-7ee5-4d66-9e47-be49a9cd1b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71f9a74961573b2aafe3dce854836f51b027ee6abfb12bc2ff57be6524979165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqqjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zpkw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:16Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:16.946919 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:16Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:16.963501 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:16Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:16.976473 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:16.976504 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:16.976517 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:16.976535 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:16.976547 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:16Z","lastTransitionTime":"2025-12-05T11:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:16.985996 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"600a7698-d6af-4d3a-997c-2af7492b676c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4e303bada68d0a42afcecbe481dd6199dd2e545598d65614cee8ce1097fa3da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e0e4eacda19e748d0d3f8e1d76b38c186c2df0f81a171387de0d9016a76c719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c6351e5a4a9441b43ba3145aa818599799ad7fcdd6bf62b5d0ec08eb3a9d5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c60e02416d658dfb8deecf915952febb04b650126edae08c8bf522c50524c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://489eff9110c6e48b0e3119edf09adf0a2a48fc1fdc1bb8e81f113ab882725eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:16Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:16.997158 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90beec388d3137d02789bb6be800fbb0b1678f8799f133d1e48b1f636f70e8c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://780e71361f1592490b1bbc77b9a869204d3a20319d1bd6c8330d3b0fc81d197f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:16Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.007870 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://098ed82d50e963f31d1176e06249004a4cc94258aab5dabf6628dead209ae3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:17Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.009496 4728 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.018082 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gf8np" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f292da29-a632-47aa-8bcc-2d999eaa6c11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d525f104a98170107ecc1ae96c4f5c5ff3dda2f976336c219e5a9498725380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dxmch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gf8np\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:17Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.025958 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bfa60b-fcb6-4519-abc5-c25fea50921d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d952894ef1ae2b5d6c23b0d1b85d9d2689e062674a2e4ba53719b6c7290bbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41cccfccd3b8763566f9a4453832461a74aedcf7bcc18e8c925bb20e1b620de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-w8qlp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:17Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.037863 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8pwbb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e18c7d32-4ecb-4931-931e-56a7898cb233\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d53da0e296e545d13941882c9568c12f967a274b906dafb14c0905f558a155c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\
",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f79805a42a1054302447bc55dc07df80cf19e651cd1c3e783a4614ff49f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"
containerID\\\":\\\"cri-o://f79805a42a1054302447bc55dc07df80cf19e651cd1c3e783a4614ff49f9f624\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b953abf0e1905c143f473ea9d09341d7602168ff1350c317346bef8dda9402f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b953abf0e1905c143f473ea9d09341d7602168ff1350c317346bef8dda9402f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7627aff65d1bc4a39148f098ac2e1676d41c980ad3dd2d9494f4bee7b4c0872\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7627aff65d1bc4a39148f098ac2e1676d41c980ad3dd2d9494f4bee7b4c0872\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8pwbb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:17Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.047315 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4abe5ae7-e1e1-4771-bb01-4e62b10e074b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe411cb87a46bdaf8208ebd4162ff18f2f9a2617ca43eb635793364573eb3ea4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ca6321022609b3cd60a9e1a534cfd3e69d3f520e5252ed4ffa8f76be4af91e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ccc55c28cdfadaa191738047a8d7223cf102d79a9af4d225562a87c42cf1c8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"sta
rtedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1665078c543b335af45e2ef37840dff719d513125a747f8c07dc74ef97e07354\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:17Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.060703 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6b1dbb0-8a99-4b3b-870e-771cdaac1bac\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://adb81cae56872a55c5781abe457653330ce2284251d64366ece5f2f05055b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ab41915fb3de0296bc2ba49fe39835620c1ed216790add23ccc2c23ac718c2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://920da764fdc49312fdf906f31a1280028d5762b4b7af2b3806c9488e3dfa9d71\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd145f7ac78fc008f9b9b24a11f51d6afc11e3cfa047b8d178d11507ddde8e77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b05c55e24f53fc91a2fd2b2a34b000751799933c3cb56da1ea666a6bfb6ba13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T11:08:05Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 11:07:59.827917 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 11:07:59.829563 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2883091425/tls.crt::/tmp/serving-cert-2883091425/tls.key\\\\\\\"\\\\nI1205 11:08:05.319025 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 11:08:05.321746 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 11:08:05.321774 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 11:08:05.321847 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 11:08:05.321860 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 11:08:05.327281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 11:08:05.327310 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 11:08:05.327326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 11:08:05.327329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 11:08:05.327332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 11:08:05.327493 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 11:08:05.328771 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af8363ba45c19a07e7d11064c2e321ec28b939288d01eb82d8bd9e1ee6b93998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:17Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.070600 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zpkw4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ed53a3-7ee5-4d66-9e47-be49a9cd1b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71f9a74961573b2aafe3dce854836f51b027ee6abfb12bc2ff57be6524979165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqqjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zpkw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:17Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.079540 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-85f5z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f536e7a4-ad53-442e-b7c3-8928fcd89f22\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://180e166e57042a6843516cf424020e8aab91131f7e0cc4c6f061de5e46bb0f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r72s7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-85f5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:17Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.079849 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.079880 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.079891 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.079908 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.079920 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:17Z","lastTransitionTime":"2025-12-05T11:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.092597 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:17Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.112234 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec1c7fc1e4ad2be5a7dfb1eeaa3def2a68d041e302643e875869f6c657c5f6d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:17Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.131189 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62ccfb6861d12cec3082bfe61fc1976dc0889398539797aa62979b632e74c9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82bd221438311b8ba34cfc26e43e590743b257ad91407befb8f4d2e17f7ba55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aacb2f8e3ec748166a587a5c2c7c026382073d9e3b8e20caf4dc444e9cc70c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19041ddafe3e2fe2bb95577c9bc5e372ba443790469deb4f36634d3690a61b0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d488533307095d5c0efc56c8f32bac388226d2d5763c5c31db03c84585899f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02bc0b9bc6ed08d2162af49b31f5e66ef70f78723e4016f6be5905d1227201b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3211044523f003ebc4affc775f3a904936bda90
c1502fc2f9a2b5f2bd7f9155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9be8133cc8e8e518cd8a6a3ace93814e8d565d8bea2b81433ba07281ade2f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wchlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:17Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.153241 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"600a7698-d6af-4d3a-997c-2af7492b676c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4e303bada68d0a42afcecbe481dd6199dd2e545598d65614cee8ce1097fa3da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e0e4eacda19e748d0d3f8e1d76b38c186c2df0f81a171387de0d9016a76c719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c6351e5a4a9441b43ba3145aa818599799ad7fcdd6bf62b5d0ec08eb3a9d5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c60e02416d658dfb8deecf915952febb04b650
126edae08c8bf522c50524c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://489eff9110c6e48b0e3119edf09adf0a2a48fc1fdc1bb8e81f113ab882725eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:17Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.164367 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90beec388d3137d02789bb6be800fbb0b1678f8799f133d1e48b1f636f70e8c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://780e71361f1592490b1bbc77b9a869204d3a20319d1bd6c8330d3b0fc81d197f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:17Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.173596 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:17Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.181474 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.181513 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.181523 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.181538 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.181549 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:17Z","lastTransitionTime":"2025-12-05T11:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.183210 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:17Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.201934 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4abe5ae7-e1e1-4771-bb01-4e62b10e074b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe411cb87a46bdaf8208ebd4162ff18f2f9a2617ca43eb635793364573eb3ea4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ca6321022609b3cd60a9e1a534cfd3e69d3f520e5252ed4ffa8f76be4af91e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ccc55c28cdfadaa191738047a8d7223cf102d79a9af4d225562a87c42cf1c8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1665078c543b335af45e2ef37840dff719d513125a747f8c07dc74ef97e07354\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:17Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.216335 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6b1dbb0-8a99-4b3b-870e-771cdaac1bac\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://adb81cae56872a55c5781abe457653330ce2284251d64366ece5f2f05055b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ab41915fb3de0296bc2ba49fe39835620c1ed216790add23ccc2c23ac718c2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://920da764fdc49312fdf906f31a1280028d5762b4b7af2b3806c9488e3dfa9d71\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd145f7ac78fc008f9b9b24a11f51d6afc11e3cfa047b8d178d11507ddde8e77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b05c55e24f53fc91a2fd2b2a34b000751799933c3cb56da1ea666a6bfb6ba13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T11:08:05Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 11:07:59.827917 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 11:07:59.829563 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2883091425/tls.crt::/tmp/serving-cert-2883091425/tls.key\\\\\\\"\\\\nI1205 11:08:05.319025 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 11:08:05.321746 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 11:08:05.321774 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 11:08:05.321847 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 11:08:05.321860 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 11:08:05.327281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 11:08:05.327310 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 11:08:05.327326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 11:08:05.327329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 11:08:05.327332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 11:08:05.327493 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 11:08:05.328771 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af8363ba45c19a07e7d11064c2e321ec28b939288d01eb82d8bd9e1ee6b93998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:17Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.227778 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://098ed82d50e963f31d1176e06249004a4cc94258aab5dabf6628dead209ae3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:17Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.237772 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gf8np" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f292da29-a632-47aa-8bcc-2d999eaa6c11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d525f104a98170107ecc1ae96c4f5c5ff3dda2f976336c219e5a9498725380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dxmch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gf8np\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:17Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.246444 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bfa60b-fcb6-4519-abc5-c25fea50921d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d952894ef1ae2b5d6c23b0d1b85d9d2689e062674a2e4ba53719b6c7290bbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41cccfccd3b8763566f9a4453832461a74aedcf7bcc18e8c925bb20e1b620de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-w8qlp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:17Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.257361 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8pwbb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e18c7d32-4ecb-4931-931e-56a7898cb233\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d53da0e296e545d13941882c9568c12f967a274b906dafb14c0905f558a155c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\
",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f79805a42a1054302447bc55dc07df80cf19e651cd1c3e783a4614ff49f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"
containerID\\\":\\\"cri-o://f79805a42a1054302447bc55dc07df80cf19e651cd1c3e783a4614ff49f9f624\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b953abf0e1905c143f473ea9d09341d7602168ff1350c317346bef8dda9402f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b953abf0e1905c143f473ea9d09341d7602168ff1350c317346bef8dda9402f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7627aff65d1bc4a39148f098ac2e1676d41c980ad3dd2d9494f4bee7b4c0872\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7627aff65d1bc4a39148f098ac2e1676d41c980ad3dd2d9494f4bee7b4c0872\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8pwbb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:17Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.283821 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.283854 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.283864 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.283877 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.283886 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:17Z","lastTransitionTime":"2025-12-05T11:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.351717 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:08:17 crc kubenswrapper[4728]: E1205 11:08:17.351950 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.385530 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.385571 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.385580 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.385594 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.385604 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:17Z","lastTransitionTime":"2025-12-05T11:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.487388 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.487456 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.487473 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.487494 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.487511 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:17Z","lastTransitionTime":"2025-12-05T11:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.589496 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.589536 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.589547 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.589562 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.589572 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:17Z","lastTransitionTime":"2025-12-05T11:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.691816 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.691860 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.691875 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.691894 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.691906 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:17Z","lastTransitionTime":"2025-12-05T11:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.722283 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.732622 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zpkw4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ed53a3-7ee5-4d66-9e47-be49a9cd1b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71f9a74961573b2aafe3dce854836f51b027ee6abfb12bc2ff57be6524979165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqqjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zpkw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:17Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.743454 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-85f5z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f536e7a4-ad53-442e-b7c3-8928fcd89f22\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://180e166e57042a6843516cf424020e8aab91131f7e0cc4c6f061de5e46bb0f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r72s7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-85f5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:17Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.758876 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:17Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.773925 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec1c7fc1e4ad2be5a7dfb1eeaa3def2a68d041e302643e875869f6c657c5f6d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:17Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.792881 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62ccfb6861d12cec3082bfe61fc1976dc0889398539797aa62979b632e74c9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82bd221438311b8ba34cfc26e43e590743b257ad91407befb8f4d2e17f7ba55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\
\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aacb2f8e3ec748166a587a5c2c7c026382073d9e3b8e20caf4dc444e9cc70c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19041ddafe3e2fe2bb95577c9bc5e372ba443790469deb4f36634d3690a61b0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d488533307095d5c0efc56c8f32bac388226d2d5763c5c31db03c84585899f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-acces
s-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02bc0b9bc6ed08d2162af49b31f5e66ef70f78723e4016f6be5905d1227201b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3211044523f003ebc4affc775f3a904936bda90c1502fc2f9a2b5f2bd7f9155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\
\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9be8133cc8e8e518cd8a6a3ace93814e8d565d8bea2b81433ba07281ade2f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wchlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:17Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 
11:08:17.794234 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.794279 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.794290 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.794307 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.794320 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:17Z","lastTransitionTime":"2025-12-05T11:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.814594 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"600a7698-d6af-4d3a-997c-2af7492b676c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4e303bada68d0a42afcecbe481dd6199dd2e545598d65614cee8ce1097fa3da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e0e4eacda19e748d0d3f8e1d76b38c186c2df0f81a171387de0d9016a76c719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c6351e5a4a9441b43ba3145aa818599799ad7fcdd6bf62b5d0ec08eb3a9d5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c60e02416d658dfb8deecf915952febb04b650126edae08c8bf522c50524c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://489eff9110c6e48b0e3119edf09adf0a2a48fc1fdc1bb8e81f113ab882725eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"r
estartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:17Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.817103 4728 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.827381 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90beec388d3137d02789bb6be800fbb0b1678f8799f133d1e48b1f636f70e8c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://780e71361f1592490b1bbc77b9a869204d3a20319d1bd6c8330d3b0fc81d197f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:17Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.841952 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:17Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.852491 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:17Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.863274 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4abe5ae7-e1e1-4771-bb01-4e62b10e074b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe411cb87a46bdaf8208ebd4162ff18f2f9a2617ca43eb635793364573eb3ea4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ca6321022609b3cd60a9e1a534cfd3e69d3f520e5252ed4ffa8f76be4af91e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1
220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ccc55c28cdfadaa191738047a8d7223cf102d79a9af4d225562a87c42cf1c8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1665078c543b335af45e2ef37840dff719d513125a747f8c07dc74ef97e07354\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:17Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.874573 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6b1dbb0-8a99-4b3b-870e-771cdaac1bac\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://adb81cae56872a55c5781abe457653330ce2284251d64366ece5f2f05055b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ab41915fb3de0296bc2ba49fe39835620c1ed216790add23ccc2c23ac718c2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://920da764fdc49312fdf906f31a1280028d5762b4b7af2b3806c9488e3dfa9d71\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd145f7ac78fc008f9b9b24a11f51d6afc11e3cfa047b8d178d11507ddde8e77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b05c55e24f53fc91a2fd2b2a34b000751799933c3cb56da1ea666a6bfb6ba13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T11:08:05Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 11:07:59.827917 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 11:07:59.829563 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2883091425/tls.crt::/tmp/serving-cert-2883091425/tls.key\\\\\\\"\\\\nI1205 11:08:05.319025 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 11:08:05.321746 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 11:08:05.321774 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 11:08:05.321847 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 11:08:05.321860 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 11:08:05.327281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 11:08:05.327310 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 11:08:05.327326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 11:08:05.327329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 11:08:05.327332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 11:08:05.327493 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 11:08:05.328771 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af8363ba45c19a07e7d11064c2e321ec28b939288d01eb82d8bd9e1ee6b93998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:17Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.885129 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://098ed82d50e963f31d1176e06249004a4cc94258aab5dabf6628dead209ae3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:17Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.896305 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.896343 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.896355 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.896370 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.896381 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:17Z","lastTransitionTime":"2025-12-05T11:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.897878 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gf8np" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f292da29-a632-47aa-8bcc-2d999eaa6c11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d525f104a98170107ecc1ae96c4f5c5ff3dda2f976336c219e5a9498725380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dxmch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gf8np\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:17Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.909077 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bfa60b-fcb6-4519-abc5-c25fea50921d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d952894ef1ae2b5d6c23b0d1b85d9d2689e062674a2e4ba53719b6c7290bbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41cccfccd3b8763566f9a4453832461a74aedcf7bcc18e8c925bb20e1b620de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w8qlp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:17Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.922415 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8pwbb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e18c7d32-4ecb-4931-931e-56a7898cb233\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d53da0e296e545d13941882c9568c12f967a274b906dafb14c0905f558a155c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/e
ntrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f79805a42a1054302447bc55dc07df80cf19e651cd1c3e783a4614ff49f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f
2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f79805a42a1054302447bc55dc07df80cf19e651cd1c3e783a4614ff49f9f624\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b953abf0e1905c143f473ea9d09341d7602168ff1350c317346bef8dda9402f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b953abf0e1905c143f473ea9d09341d7602168ff1350c317346bef8dda9402f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7627aff65d1bc4a39148f098ac2e1676d41c980ad3dd2d9494f4bee7b4c0872\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7627aff65d1bc4a39148f098ac2e1676d41c980ad3dd2d9494f4bee7b4c0872\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\
\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8pwbb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:17Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.999486 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.999557 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.999582 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.999618 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:17 crc kubenswrapper[4728]: I1205 11:08:17.999685 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:17Z","lastTransitionTime":"2025-12-05T11:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.102598 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.102662 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.102680 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.102707 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.102733 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:18Z","lastTransitionTime":"2025-12-05T11:08:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.206573 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.206623 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.206632 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.206648 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.206657 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:18Z","lastTransitionTime":"2025-12-05T11:08:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.309952 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.310038 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.310065 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.310097 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.310120 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:18Z","lastTransitionTime":"2025-12-05T11:08:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.351488 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.351556 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:08:18 crc kubenswrapper[4728]: E1205 11:08:18.351655 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 11:08:18 crc kubenswrapper[4728]: E1205 11:08:18.351826 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.412963 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.413083 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.413097 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.413117 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.413133 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:18Z","lastTransitionTime":"2025-12-05T11:08:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.516201 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.516283 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.516305 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.516335 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.516358 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:18Z","lastTransitionTime":"2025-12-05T11:08:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.545703 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-n2qgk"] Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.546151 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-n2qgk" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.548253 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.548712 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.564653 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec1c7fc1e4ad2be5a7dfb1eeaa3def2a68d041e302643e875869f6c657c5f6d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:18Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.587489 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62ccfb6861d12cec3082bfe61fc1976dc0889398539797aa62979b632e74c9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82bd221438311b8ba34cfc26e43e590743b257ad91407befb8f4d2e17f7ba55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aacb2f8e3ec748166a587a5c2c7c026382073d9e3b8e20caf4dc444e9cc70c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19041ddafe3e2fe2bb95577c9bc5e372ba443790469deb4f36634d3690a61b0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d488533307095d5c0efc56c8f32bac388226d2d5763c5c31db03c84585899f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02bc0b9bc6ed08d2162af49b31f5e66ef70f78723e4016f6be5905d1227201b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3211044523f003ebc4affc775f3a904936bda90c1502fc2f9a2b5f2bd7f9155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9be8133cc8e8e518cd8a6a3ace93814e8d565d8bea2b81433ba07281ade2f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wchlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:18Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.601180 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zpkw4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ed53a3-7ee5-4d66-9e47-be49a9cd1b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71f9a74961573b2aafe3dce854836f51b027ee6abfb12bc2ff57be6524979165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqqjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zpkw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:18Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.617156 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-85f5z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f536e7a4-ad53-442e-b7c3-8928fcd89f22\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://180e166e57042a6843516cf424020e8aab91131f7e0cc4c6f061de5e46bb0f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r72s7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-85f5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:18Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.619407 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.619470 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.619490 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.619514 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.619532 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:18Z","lastTransitionTime":"2025-12-05T11:08:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.634234 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:18Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.639651 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f26e1600-9bcd-497a-b875-7eaed5b6fba8-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-n2qgk\" (UID: \"f26e1600-9bcd-497a-b875-7eaed5b6fba8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-n2qgk" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.639841 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f26e1600-9bcd-497a-b875-7eaed5b6fba8-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-n2qgk\" (UID: \"f26e1600-9bcd-497a-b875-7eaed5b6fba8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-n2qgk" Dec 05 
11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.639930 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f26e1600-9bcd-497a-b875-7eaed5b6fba8-env-overrides\") pod \"ovnkube-control-plane-749d76644c-n2qgk\" (UID: \"f26e1600-9bcd-497a-b875-7eaed5b6fba8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-n2qgk" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.640050 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fqbq5\" (UniqueName: \"kubernetes.io/projected/f26e1600-9bcd-497a-b875-7eaed5b6fba8-kube-api-access-fqbq5\") pod \"ovnkube-control-plane-749d76644c-n2qgk\" (UID: \"f26e1600-9bcd-497a-b875-7eaed5b6fba8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-n2qgk" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.654153 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"600a7698-d6af-4d3a-997c-2af7492b676c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4e303bada68d0a42afcecbe481dd6199dd2e545598d65614cee8ce1097fa3da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e0e4eacda19e748d0d3f8e1d76b38c186c2df0f81a171387de0d9016a76c719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubern
etes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c6351e5a4a9441b43ba3145aa818599799ad7fcdd6bf62b5d0ec08eb3a9d5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c60e02416d658dfb8deecf915952febb04b650126edae08c8bf522c50524c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://489eff9110c6e48b0e3119edf09adf0a2a48fc1fdc1bb8e81f113ab882725eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\
"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:18Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.673278 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90beec388d3137d02789bb6be800fbb0b1678f8799f133d1e48b1f636f70e8c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://780e71361f1592490b1bbc77b9a869204d3a20319d1bd6c8330d3b0fc81d197f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:18Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.686287 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:18Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.697432 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:18Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.710388 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bfa60b-fcb6-4519-abc5-c25fea50921d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d952894ef1ae2b5d6c23b0d1b85d9d2689e062674a2e4ba53719b6c7290bbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41cccfccd3b8763566f9a4453832461a74aedcf7bcc18e8c925bb20e1b620de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w8qlp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:18Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.723374 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.723470 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.723490 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.723531 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.723547 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:18Z","lastTransitionTime":"2025-12-05T11:08:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.729759 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8pwbb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e18c7d32-4ecb-4931-931e-56a7898cb233\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d53da0e296e545d13941882c9568c12f967a274b906dafb14c0905f558a155c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f79805a42a1054302447bc55dc07df80cf19e651cd1c3e783a4614ff49f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f79805a42a1054302447bc55dc07df80cf19e651cd1c3e783a4614ff49f9f624\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b953abf0e1905c143f473ea9d09341d7602168ff1350c317346bef8dda9402f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b953abf0e1905c143f473ea9d09341d7602168ff1350c317346bef8dda9402f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7627aff65d1bc4a39148f098ac2e1676d41c980ad3dd2d9494f4bee7b4c0872\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7627aff65d1bc4a39148f098ac2e1676d41c980ad3dd2d9494f4bee7b4c0872\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8pwbb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:18Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.741424 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f26e1600-9bcd-497a-b875-7eaed5b6fba8-env-overrides\") pod \"ovnkube-control-plane-749d76644c-n2qgk\" (UID: \"f26e1600-9bcd-497a-b875-7eaed5b6fba8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-n2qgk" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.741486 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fqbq5\" (UniqueName: \"kubernetes.io/projected/f26e1600-9bcd-497a-b875-7eaed5b6fba8-kube-api-access-fqbq5\") pod \"ovnkube-control-plane-749d76644c-n2qgk\" (UID: \"f26e1600-9bcd-497a-b875-7eaed5b6fba8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-n2qgk" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.741550 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f26e1600-9bcd-497a-b875-7eaed5b6fba8-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-n2qgk\" (UID: \"f26e1600-9bcd-497a-b875-7eaed5b6fba8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-n2qgk" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.741608 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f26e1600-9bcd-497a-b875-7eaed5b6fba8-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-n2qgk\" (UID: \"f26e1600-9bcd-497a-b875-7eaed5b6fba8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-n2qgk" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.742128 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-n2qgk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f26e1600-9bcd-497a-b875-7eaed5b6fba8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:18Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:18Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fqbq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fqbq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-n2qgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:18Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.742231 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f26e1600-9bcd-497a-b875-7eaed5b6fba8-env-overrides\") pod \"ovnkube-control-plane-749d76644c-n2qgk\" (UID: \"f26e1600-9bcd-497a-b875-7eaed5b6fba8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-n2qgk" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.742613 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f26e1600-9bcd-497a-b875-7eaed5b6fba8-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-n2qgk\" (UID: \"f26e1600-9bcd-497a-b875-7eaed5b6fba8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-n2qgk" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.748143 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f26e1600-9bcd-497a-b875-7eaed5b6fba8-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-n2qgk\" (UID: \"f26e1600-9bcd-497a-b875-7eaed5b6fba8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-n2qgk" Dec 05 
11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.756477 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4abe5ae7-e1e1-4771-bb01-4e62b10e074b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe411cb87a46bdaf8208ebd4162ff18f2f9a2617ca43eb635793364573eb3ea4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ca6321022609b3cd60a9e1a534cfd3e69d3f520e5252ed4ffa8f76be4af91e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ccc55c28cdfadaa191738047a8d7223cf102d79a9af4d225562a87c42cf1c8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1665078c543b335af45e2ef37840dff719d513125a747f8c07dc74ef97e07354\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:18Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.757979 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fqbq5\" (UniqueName: \"kubernetes.io/projected/f26e1600-9bcd-497a-b875-7eaed5b6fba8-kube-api-access-fqbq5\") pod \"ovnkube-control-plane-749d76644c-n2qgk\" (UID: \"f26e1600-9bcd-497a-b875-7eaed5b6fba8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-n2qgk" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.771932 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6b1dbb0-8a99-4b3b-870e-771cdaac1bac\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://adb81cae56872a55c5781abe457653330ce2284251d64366ece5f2f05055b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ab41915fb3de0296bc2ba49fe39835620c1ed216790add23ccc2c23ac718c2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://920da764fdc49312fdf906f31a1280028d5762b4b7af2b3806c9488e3dfa9d71\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd145f7ac78fc008f9b9b24a11f51d6afc11e3cfa047b8d178d11507ddde8e77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b05c55e24f53fc91a2fd2b2a34b000751799933c3cb56da1ea666a6bfb6ba13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T11:08:05Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 11:07:59.827917 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 11:07:59.829563 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2883091425/tls.crt::/tmp/serving-cert-2883091425/tls.key\\\\\\\"\\\\nI1205 11:08:05.319025 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 11:08:05.321746 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 11:08:05.321774 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 11:08:05.321847 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 11:08:05.321860 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 11:08:05.327281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 11:08:05.327310 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 11:08:05.327326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 11:08:05.327329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 11:08:05.327332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 11:08:05.327493 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 11:08:05.328771 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af8363ba45c19a07e7d11064c2e321ec28b939288d01eb82d8bd9e1ee6b93998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:18Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.784481 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://098ed82d50e963f31d1176e06249004a4cc94258aab5dabf6628dead209ae3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:18Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.799813 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gf8np" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f292da29-a632-47aa-8bcc-2d999eaa6c11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d525f104a98170107ecc1ae96c4f5c5ff3dda2f976336c219e5a9498725380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dxmch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gf8np\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:18Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.820962 4728 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.825583 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.825620 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.825635 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.825653 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.825665 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:18Z","lastTransitionTime":"2025-12-05T11:08:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.868013 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-n2qgk" Dec 05 11:08:18 crc kubenswrapper[4728]: W1205 11:08:18.886749 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf26e1600_9bcd_497a_b875_7eaed5b6fba8.slice/crio-8ac0fe147170d58d67c22feabcb7079ed2d8c71cce9168a417ac73619b1dbc09 WatchSource:0}: Error finding container 8ac0fe147170d58d67c22feabcb7079ed2d8c71cce9168a417ac73619b1dbc09: Status 404 returned error can't find the container with id 8ac0fe147170d58d67c22feabcb7079ed2d8c71cce9168a417ac73619b1dbc09 Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.938228 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.938278 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.938292 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.938312 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:18 crc kubenswrapper[4728]: I1205 11:08:18.938328 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:18Z","lastTransitionTime":"2025-12-05T11:08:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.041102 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.041138 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.041149 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.041166 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.041177 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:19Z","lastTransitionTime":"2025-12-05T11:08:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.143871 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.143913 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.143923 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.143937 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.143947 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:19Z","lastTransitionTime":"2025-12-05T11:08:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.246649 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.246685 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.246693 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.246705 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.246713 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:19Z","lastTransitionTime":"2025-12-05T11:08:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.349690 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.349742 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.349759 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.349782 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.349829 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:19Z","lastTransitionTime":"2025-12-05T11:08:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.351475 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:08:19 crc kubenswrapper[4728]: E1205 11:08:19.351618 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.452363 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.452401 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.452415 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.452435 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.452451 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:19Z","lastTransitionTime":"2025-12-05T11:08:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.489114 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.489154 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.489167 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.489187 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.489198 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:19Z","lastTransitionTime":"2025-12-05T11:08:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:19 crc kubenswrapper[4728]: E1205 11:08:19.504322 4728 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"feb38e4d-326c-4c7a-a272-95e0ac54f009\\\",\\\"systemUUID\\\":\\\"65b68dc7-92a1-4fa1-bbc7-423a936860c6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:19Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.508936 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.508972 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.508984 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.509018 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.509030 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:19Z","lastTransitionTime":"2025-12-05T11:08:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:19 crc kubenswrapper[4728]: E1205 11:08:19.524291 4728 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"feb38e4d-326c-4c7a-a272-95e0ac54f009\\\",\\\"systemUUID\\\":\\\"65b68dc7-92a1-4fa1-bbc7-423a936860c6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:19Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.528489 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.528542 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.528558 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.528579 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.528593 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:19Z","lastTransitionTime":"2025-12-05T11:08:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:19 crc kubenswrapper[4728]: E1205 11:08:19.542531 4728 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"feb38e4d-326c-4c7a-a272-95e0ac54f009\\\",\\\"systemUUID\\\":\\\"65b68dc7-92a1-4fa1-bbc7-423a936860c6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:19Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.545866 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.545915 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.545930 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.545949 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.545961 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:19Z","lastTransitionTime":"2025-12-05T11:08:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:19 crc kubenswrapper[4728]: E1205 11:08:19.559748 4728 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"feb38e4d-326c-4c7a-a272-95e0ac54f009\\\",\\\"systemUUID\\\":\\\"65b68dc7-92a1-4fa1-bbc7-423a936860c6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:19Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.563869 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.563924 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.563952 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.563977 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.563994 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:19Z","lastTransitionTime":"2025-12-05T11:08:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:19 crc kubenswrapper[4728]: E1205 11:08:19.582466 4728 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"feb38e4d-326c-4c7a-a272-95e0ac54f009\\\",\\\"systemUUID\\\":\\\"65b68dc7-92a1-4fa1-bbc7-423a936860c6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:19Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:19 crc kubenswrapper[4728]: E1205 11:08:19.582647 4728 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.584404 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.584449 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.584460 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.584476 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.584487 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:19Z","lastTransitionTime":"2025-12-05T11:08:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.687380 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.687413 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.687423 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.687435 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.687445 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:19Z","lastTransitionTime":"2025-12-05T11:08:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.790054 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.790089 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.790097 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.790111 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.790121 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:19Z","lastTransitionTime":"2025-12-05T11:08:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.826894 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-wchlf_1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5/ovnkube-controller/0.log" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.830020 4728 generic.go:334] "Generic (PLEG): container finished" podID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" containerID="a3211044523f003ebc4affc775f3a904936bda90c1502fc2f9a2b5f2bd7f9155" exitCode=1 Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.830071 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" event={"ID":"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5","Type":"ContainerDied","Data":"a3211044523f003ebc4affc775f3a904936bda90c1502fc2f9a2b5f2bd7f9155"} Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.830650 4728 scope.go:117] "RemoveContainer" containerID="a3211044523f003ebc4affc775f3a904936bda90c1502fc2f9a2b5f2bd7f9155" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.832022 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-n2qgk" event={"ID":"f26e1600-9bcd-497a-b875-7eaed5b6fba8","Type":"ContainerStarted","Data":"b47033c6c217ce37f789d031e18e21eeb86ba85f574c6455e15c79a25bfeaecc"} Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.832078 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-n2qgk" event={"ID":"f26e1600-9bcd-497a-b875-7eaed5b6fba8","Type":"ContainerStarted","Data":"8ac0fe147170d58d67c22feabcb7079ed2d8c71cce9168a417ac73619b1dbc09"} Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.847632 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6b1dbb0-8a99-4b3b-870e-771cdaac1bac\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://adb81cae56872a55c5781abe457653330ce2284251d64366ece5f2f05055b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ab41915fb3de0296bc2ba49fe39835620c1ed216790add23ccc2c23ac718c2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://920da764fdc49312fdf906f31a1280028d5762b4b7af2b3806c9488e3dfa9d71\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd145f7ac78fc008f9b9b24a11f51d6afc11e3cfa047b8d178d11507ddde8e77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b05c55e24f53fc91a2fd2b2a34b000751799933c3cb56da1ea666a6bfb6ba13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T11:08:05Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 11:07:59.827917 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 11:07:59.829563 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2883091425/tls.crt::/tmp/serving-cert-2883091425/tls.key\\\\\\\"\\\\nI1205 11:08:05.319025 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 11:08:05.321746 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 11:08:05.321774 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 11:08:05.321847 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 11:08:05.321860 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 11:08:05.327281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 11:08:05.327310 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 11:08:05.327326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 11:08:05.327329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 11:08:05.327332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 11:08:05.327493 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 11:08:05.328771 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af8363ba45c19a07e7d11064c2e321ec28b939288d01eb82d8bd9e1ee6b93998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:19Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.862461 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://098ed82d50e963f31d1176e06249004a4cc94258aab5dabf6628dead209ae3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:19Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.881848 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gf8np" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f292da29-a632-47aa-8bcc-2d999eaa6c11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d525f104a98170107ecc1ae96c4f5c5ff3dda2f976336c219e5a9498725380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dxmch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gf8np\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:19Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.893144 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.893214 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.893237 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.893266 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.893288 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:19Z","lastTransitionTime":"2025-12-05T11:08:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.901339 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bfa60b-fcb6-4519-abc5-c25fea50921d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d952894ef1ae2b5d6c23b0d1b85d9d2689e062674a2e4ba53719b6c7290bbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41cccfccd3b8763566f9a4453832461a74aedcf7bcc18e8c925bb20e1b620de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w8qlp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:19Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.921607 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8pwbb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e18c7d32-4ecb-4931-931e-56a7898cb233\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d53da0e296e545d13941882c9568c12f967a274b906dafb14c0905f558a155c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f79805a42a1054302447bc55dc07df80cf19e651cd1c3e783a4614ff49f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f79805a42a1054302447bc55dc07df80cf19e651cd1c3e783a4614ff49f9f624\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b953abf0e1905c143f473ea9d09341d7602168ff1350c317346bef8dda9402f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b953abf0e1905c143f473ea9d09341d7602168ff1350c317346bef8dda9402f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7627aff65d1bc4a39148f098ac2e1676d41c980ad3dd2d9494f4bee7b4c0872\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7627aff65d1bc4a39148f098ac2e1676d41c980ad3dd2d9494f4bee7b4c0872\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8pwbb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:19Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.934402 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-n2qgk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f26e1600-9bcd-497a-b875-7eaed5b6fba8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:18Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:18Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fqbq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fqbq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-n2qgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:19Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.946326 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4abe5ae7-e1e1-4771-bb01-4e62b10e074b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe411cb87a46bdaf8208ebd4162ff18f2f9a2617ca43eb635793364573eb3ea4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ca6321022609b3cd60a9e1a534cfd3e69d3f520e5252ed4ffa8f76be4af91e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ccc55c28cdfadaa191738047a8d7223cf102d79a9af4d225562a87c42cf1c8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1665078c543b335af45e2ef37840dff719d513125a747f8c07dc74ef97e07354\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cl
uster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:19Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.956877 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-85f5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f536e7a4-ad53-442e-b7c3-8928fcd89f22\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://180e166e57042a6843516cf424020e8aab91131f7e0cc4c6f061de5e46bb0f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r72s7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.1
68.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-85f5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:19Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.969917 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:19Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.987112 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec1c7fc1e4ad2be5a7dfb1eeaa3def2a68d041e302643e875869f6c657c5f6d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:19Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.995434 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.995471 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.995482 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.995499 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:19 crc kubenswrapper[4728]: I1205 11:08:19.995511 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:19Z","lastTransitionTime":"2025-12-05T11:08:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.007949 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62ccfb6861d12cec3082bfe61fc1976dc0889398539797aa62979b632e74c9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82bd221438311b8ba34cfc26e43e590743b257ad91407befb8f4d2e17f7ba55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aacb2f8e3ec748166a587a5c2c7c026382073d9e3b8e20caf4dc444e9cc70c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19041ddafe3e2fe2bb95577c9bc5e372ba443790469deb4f36634d3690a61b0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d488533307095d5c0efc56c8f32bac388226d2d5763c5c31db03c84585899f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02bc0b9bc6ed08d2162af49b31f5e66ef70f78723e4016f6be5905d1227201b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3211044523f003ebc4affc775f3a904936bda90
c1502fc2f9a2b5f2bd7f9155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3211044523f003ebc4affc775f3a904936bda90c1502fc2f9a2b5f2bd7f9155\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T11:08:19Z\\\",\\\"message\\\":\\\"g reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1205 11:08:17.754182 5987 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 11:08:17.754368 5987 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 11:08:17.754604 5987 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 11:08:17.754676 5987 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 11:08:17.754697 5987 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 11:08:17.755040 5987 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 11:08:17.755054 5987 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 11:08:17.755077 5987 factory.go:656] Stopping watch factory\\\\nI1205 11:08:17.755090 5987 ovnkube.go:599] Stopped ovnkube\\\\nI1205 11:08:17.755107 5987 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 
11\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9be8133cc8e8e518cd8a6a3ace93814e8d565d8bea2b81433ba07281ade2f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wchlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:20Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.020293 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zpkw4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ed53a3-7ee5-4d66-9e47-be49a9cd1b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71f9a74961573b2aafe3dce854836f51b027ee6abfb12bc2ff57be6524979165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqqjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.12
6.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zpkw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:20Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.041194 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90beec388d3137d02789bb6be800fbb0b1678f8799f133d1e48b1f636f70e8c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://780e71361f1592490b1bbc77b9a869204d3a20319d1bd6c8330d3b0fc81d197f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:20Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.055924 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:20Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.068049 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:20Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.087967 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"600a7698-d6af-4d3a-997c-2af7492b676c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4e303bada68d0a42afcecbe481dd6199dd2e545598d65614cee8ce1097fa3da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e0e4eacda19e748d0d3f8e1d76b38c186c2df0f81a171387de0d9016a76c719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"vol
umeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c6351e5a4a9441b43ba3145aa818599799ad7fcdd6bf62b5d0ec08eb3a9d5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c60e02416d658dfb8deecf915952febb04b650126edae08c8bf522c50524c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://489eff9110c6e48b0e3119edf09adf0a2a48fc1fdc1bb8e81f113ab882725eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"exitCode\\\":0,\\
\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:20Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.096747 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-2dq9w"] Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.097311 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2dq9w" Dec 05 11:08:20 crc kubenswrapper[4728]: E1205 11:08:20.097370 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-2dq9w" podUID="99a5c711-5c13-4615-93fc-9fbf02ce54ca" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.097811 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.097850 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.097860 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.097875 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.097885 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:20Z","lastTransitionTime":"2025-12-05T11:08:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.110776 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6b1dbb0-8a99-4b3b-870e-771cdaac1bac\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://adb81cae56872a55c5781abe457653330ce2284251d64366ece5f2f05055b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ab41915fb3de0296bc2ba49fe39835620c1ed216790add23ccc2c23ac718c2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshif
t-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://920da764fdc49312fdf906f31a1280028d5762b4b7af2b3806c9488e3dfa9d71\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd145f7ac78fc008f9b9b24a11f51d6afc11e3cfa047b8d178d11507ddde8e77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b05c55e24f53fc91a2fd2b2a34b000751799933c3cb56da1ea666a6bfb6ba13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T11:08:05Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 11:07:59.827917 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 11:07:59.829563 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2883091425/tls.crt::/tmp/serving-cert-2883091425/tls.key\\\\\\\"\\\\nI1205 11:08:05.319025 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 11:08:05.321746 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 11:08:05.321774 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 11:08:05.321847 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 11:08:05.321860 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 11:08:05.327281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 11:08:05.327310 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 11:08:05.327326 1 secure_serving.go:69] Use 
of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 11:08:05.327329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 11:08:05.327332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 11:08:05.327493 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 11:08:05.328771 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af8363ba45c19a07e7d11064c2e321ec28b939288d01eb82d8bd9e1ee6b93998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:20Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.123029 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://098ed82d50e963f31d1176e06249004a4cc94258aab5dabf6628dead209ae3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:20Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.134846 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gf8np" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f292da29-a632-47aa-8bcc-2d999eaa6c11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d525f104a98170107ecc1ae96c4f5c5ff3dda2f976336c219e5a9498725380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dxmch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gf8np\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:20Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.145772 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bfa60b-fcb6-4519-abc5-c25fea50921d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d952894ef1ae2b5d6c23b0d1b85d9d2689e062674a2e4ba53719b6c7290bbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41cccfccd3b8763566f9a4453832461a74aedcf7bcc18e8c925bb20e1b620de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-w8qlp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:20Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.155368 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rq68g\" (UniqueName: \"kubernetes.io/projected/99a5c711-5c13-4615-93fc-9fbf02ce54ca-kube-api-access-rq68g\") pod \"network-metrics-daemon-2dq9w\" (UID: \"99a5c711-5c13-4615-93fc-9fbf02ce54ca\") " pod="openshift-multus/network-metrics-daemon-2dq9w" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.155415 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/99a5c711-5c13-4615-93fc-9fbf02ce54ca-metrics-certs\") pod \"network-metrics-daemon-2dq9w\" (UID: \"99a5c711-5c13-4615-93fc-9fbf02ce54ca\") " pod="openshift-multus/network-metrics-daemon-2dq9w" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.158385 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8pwbb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e18c7d32-4ecb-4931-931e-56a7898cb233\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d53da0e296e545d13941882c9568c12f967a274b906dafb14c0905f558a155c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4
b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-rele
ase\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f79805a42a1054302447bc55dc07df80cf19e651cd1c3e783a4614ff49f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f79805a42a1054302447bc55dc07df80cf19e651cd1c3e783a4614ff49f9f624\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b953abf0e1905c143f473ea9d09341d7602168ff1350c317346bef8dda9402f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b953abf0e1905c143f473ea9d09341d7602168ff1350c317346bef8dda9402f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7627aff65d1bc4a39148f098ac2e1676d41c980ad3dd2d9494f4bee7b4c0872\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7627aff65d1bc4a39148f098ac2e1676d41c980ad3dd2d9494f4bee7b4c0872\\\",\\\"exitCode\\\":0,
\\\"finishedAt\\\":\\\"2025-12-05T11:08:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8pwbb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:20Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.169318 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-n2qgk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f26e1600-9bcd-497a-b875-7eaed5b6fba8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:18Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:18Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fqbq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fqbq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-n2qgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:20Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.181261 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4abe5ae7-e1e1-4771-bb01-4e62b10e074b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe411cb87a46bdaf8208ebd4162ff18f2f9a2617ca43eb635793364573eb3ea4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ca6321022609b3cd60a9e1a534cfd3e69d3f520e5252ed4ffa8f76be4af91e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ccc55c28cdfadaa191738047a8d7223cf102d79a9af4d225562a87c42cf1c8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1665078c543b335af45e2ef37840dff719d513125a747f8c07dc74ef97e07354\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:20Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.193342 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-85f5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f536e7a4-ad53-442e-b7c3-8928fcd89f22\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://180e166e57042a6843516cf424020e8aab91131f7e0cc4c6f061de5e46bb0f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r72s7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-85f5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:20Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.200673 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.200718 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.200730 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.200746 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.200757 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:20Z","lastTransitionTime":"2025-12-05T11:08:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.208966 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:20Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.223273 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec1c7fc1e4ad2be5a7dfb1eeaa3def2a68d041e302643e875869f6c657c5f6d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:20Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.247504 4728 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62ccfb6861d12cec3082bfe61fc1976dc0889398539797aa62979b632e74c9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82bd221438311b8ba34cfc26e43e590743b257ad91407befb8f4d2e17f7ba55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aacb2f8e3ec748166a587a5c2c7c026382073d9e3b8e20caf4dc444e9cc70c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36c
dd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19041ddafe3e2fe2bb95577c9bc5e372ba443790469deb4f36634d3690a61b0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d488533307095d5c0efc56c8f32bac388226d2d5763c5c31db03c84585899f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02bc0b9bc6ed08d2162af49b31f5e66ef70f78723e4016f6be5905d1227201b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-con
troller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3211044523f003ebc4affc775f3a904936bda90c1502fc2f9a2b5f2bd7f9155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3211044523f003ebc4affc775f3a904936bda90c1502fc2f9a2b5f2bd7f9155\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T11:08:19Z\\\",\\\"message\\\":\\\"g reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1205 11:08:17.754182 5987 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 11:08:17.754368 5987 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 11:08:17.754604 5987 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 11:08:17.754676 5987 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 11:08:17.754697 5987 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 11:08:17.755040 5987 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 11:08:17.755054 5987 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 11:08:17.755077 5987 factory.go:656] Stopping watch factory\\\\nI1205 11:08:17.755090 5987 ovnkube.go:599] Stopped ovnkube\\\\nI1205 11:08:17.755107 5987 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 
11\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9be8133cc8e8e518cd8a6a3ace93814e8d565d8bea2b81433ba07281ade2f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wchlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:20Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.256497 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rq68g\" (UniqueName: \"kubernetes.io/projected/99a5c711-5c13-4615-93fc-9fbf02ce54ca-kube-api-access-rq68g\") pod \"network-metrics-daemon-2dq9w\" (UID: \"99a5c711-5c13-4615-93fc-9fbf02ce54ca\") " pod="openshift-multus/network-metrics-daemon-2dq9w" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.256554 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/99a5c711-5c13-4615-93fc-9fbf02ce54ca-metrics-certs\") pod \"network-metrics-daemon-2dq9w\" (UID: \"99a5c711-5c13-4615-93fc-9fbf02ce54ca\") " pod="openshift-multus/network-metrics-daemon-2dq9w" Dec 05 11:08:20 crc kubenswrapper[4728]: E1205 11:08:20.256679 4728 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 11:08:20 crc kubenswrapper[4728]: E1205 11:08:20.256730 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/99a5c711-5c13-4615-93fc-9fbf02ce54ca-metrics-certs podName:99a5c711-5c13-4615-93fc-9fbf02ce54ca nodeName:}" failed. No retries permitted until 2025-12-05 11:08:20.756716876 +0000 UTC m=+34.898839569 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/99a5c711-5c13-4615-93fc-9fbf02ce54ca-metrics-certs") pod "network-metrics-daemon-2dq9w" (UID: "99a5c711-5c13-4615-93fc-9fbf02ce54ca") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.265913 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zpkw4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ed53a3-7ee5-4d66-9e47-be49a9cd1b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71f9a74961573b2aafe3dce854836f51b027ee6abfb12bc2ff57be6524979165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqqjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zpkw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:20Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.286041 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rq68g\" (UniqueName: \"kubernetes.io/projected/99a5c711-5c13-4615-93fc-9fbf02ce54ca-kube-api-access-rq68g\") pod \"network-metrics-daemon-2dq9w\" (UID: \"99a5c711-5c13-4615-93fc-9fbf02ce54ca\") " pod="openshift-multus/network-metrics-daemon-2dq9w" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.286112 4728 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/network-metrics-daemon-2dq9w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"99a5c711-5c13-4615-93fc-9fbf02ce54ca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rq68g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rq68g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:20Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2dq9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:20Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.297861 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90beec388d3137d02789bb6be800fbb0b1678f8799f133d1e48b1f636f70e8c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://780e71361f1592490b1bbc77b9a869204d3a20319d1bd6c8330d3b0fc81d197f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:20Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.302784 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.302847 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.302858 4728 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.302875 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.302887 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:20Z","lastTransitionTime":"2025-12-05T11:08:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.311148 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:20Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.323308 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:20Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.342199 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"600a7698-d6af-4d3a-997c-2af7492b676c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4e303bada68d0a42afcecbe481dd6199dd2e545598d65614cee8ce1097fa3da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e0e4eacda19e748d0d3f8e1d76b38c186c2df0f81a171387de0d9016a76c719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"vol
umeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c6351e5a4a9441b43ba3145aa818599799ad7fcdd6bf62b5d0ec08eb3a9d5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c60e02416d658dfb8deecf915952febb04b650126edae08c8bf522c50524c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://489eff9110c6e48b0e3119edf09adf0a2a48fc1fdc1bb8e81f113ab882725eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"exitCode\\\":0,\\
\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:20Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.351763 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.351830 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:08:20 crc kubenswrapper[4728]: E1205 11:08:20.351948 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 11:08:20 crc kubenswrapper[4728]: E1205 11:08:20.352045 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.404999 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.405041 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.405051 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.405066 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.405082 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:20Z","lastTransitionTime":"2025-12-05T11:08:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.507231 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.507262 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.507270 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.507283 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.507292 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:20Z","lastTransitionTime":"2025-12-05T11:08:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.609775 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.609841 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.609850 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.609865 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.609874 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:20Z","lastTransitionTime":"2025-12-05T11:08:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.712568 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.712613 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.712624 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.712640 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.712652 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:20Z","lastTransitionTime":"2025-12-05T11:08:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.761610 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/99a5c711-5c13-4615-93fc-9fbf02ce54ca-metrics-certs\") pod \"network-metrics-daemon-2dq9w\" (UID: \"99a5c711-5c13-4615-93fc-9fbf02ce54ca\") " pod="openshift-multus/network-metrics-daemon-2dq9w" Dec 05 11:08:20 crc kubenswrapper[4728]: E1205 11:08:20.761850 4728 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 11:08:20 crc kubenswrapper[4728]: E1205 11:08:20.761920 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/99a5c711-5c13-4615-93fc-9fbf02ce54ca-metrics-certs podName:99a5c711-5c13-4615-93fc-9fbf02ce54ca nodeName:}" failed. No retries permitted until 2025-12-05 11:08:21.76190281 +0000 UTC m=+35.904025503 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/99a5c711-5c13-4615-93fc-9fbf02ce54ca-metrics-certs") pod "network-metrics-daemon-2dq9w" (UID: "99a5c711-5c13-4615-93fc-9fbf02ce54ca") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.815900 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.815962 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.815985 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.816013 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.816036 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:20Z","lastTransitionTime":"2025-12-05T11:08:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.837570 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-wchlf_1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5/ovnkube-controller/0.log" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.841376 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" event={"ID":"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5","Type":"ContainerStarted","Data":"2a80880d77c415df6c5a75840e5c7a8bc26c41913ea49d22fef48ecb50beda8e"} Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.841517 4728 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.843952 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-n2qgk" event={"ID":"f26e1600-9bcd-497a-b875-7eaed5b6fba8","Type":"ContainerStarted","Data":"e6b426a3d72e8498cd10b4085d89ffa51beaa52d2dbda7c70c96769e32a20d27"} Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.854344 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-n2qgk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f26e1600-9bcd-497a-b875-7eaed5b6fba8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:18Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:18Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fqbq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fqbq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-n2qgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:20Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.866296 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4abe5ae7-e1e1-4771-bb01-4e62b10e074b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe411cb87a46bdaf8208ebd4162ff18f2f9a2617ca43eb635793364573eb3ea4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ca6321022609b3cd60a9e1a534cfd3e69d3f520e5252ed4ffa8f76be4af91e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ccc55c28cdfadaa191738047a8d7223cf102d79a9af4d225562a87c42cf1c8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1665078c543b335af45e2ef37840dff719d513125a747f8c07dc74ef97e07354\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:20Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.880038 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6b1dbb0-8a99-4b3b-870e-771cdaac1bac\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://adb81cae56872a55c5781abe457653330ce2284251d64366ece5f2f05055b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ab41915fb3de0296bc2ba49fe39835620c1ed216790add23ccc2c23ac718c2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c
987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://920da764fdc49312fdf906f31a1280028d5762b4b7af2b3806c9488e3dfa9d71\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd145f7ac78fc008f9b9b24a11f51d6afc11e3cfa047b8d178d11507ddde8e77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b05c55e24f53fc91a2fd2b2a34b000751799933c3cb56da1ea666a6bfb6ba13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T11:08:05Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 11:07:59.827917 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 11:07:59.829563 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2883091425/tls.crt::/tmp/serving-cert-2883091425/tls.key\\\\\\\"\\\\nI1205 11:08:05.319025 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 11:08:05.321746 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 11:08:05.321774 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 11:08:05.321847 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 11:08:05.321860 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 11:08:05.327281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 11:08:05.327310 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327322 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 11:08:05.327326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 11:08:05.327329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 11:08:05.327332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 11:08:05.327493 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 11:08:05.328771 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af8363ba45c19a07e7d11064c2e321ec28b939288d01eb82d8bd9e1ee6b93998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:20Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.893157 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://098ed82d50e963f31d1176e06249004a4cc94258aab5dabf6628dead209ae3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:20Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.909040 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gf8np" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f292da29-a632-47aa-8bcc-2d999eaa6c11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d525f104a98170107ecc1ae96c4f5c5ff3dda2f976336c219e5a9498725380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dxmch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gf8np\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:20Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.918262 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.918316 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.918335 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.918360 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.918380 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:20Z","lastTransitionTime":"2025-12-05T11:08:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.924376 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bfa60b-fcb6-4519-abc5-c25fea50921d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d952894ef1ae2b5d6c23b0d1b85d9d2689e062674a2e4ba53719b6c7290bbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41cccfccd3b8763566f9a4453832461a74aedcf7bcc18e8c925bb20e1b620de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w8qlp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:20Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.940220 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8pwbb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e18c7d32-4ecb-4931-931e-56a7898cb233\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d53da0e296e545d13941882c9568c12f967a274b906dafb14c0905f558a155c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f79805a42a1054302447bc55dc07df80cf19e651cd1c3e783a4614ff49f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f79805a42a1054302447bc55dc07df80cf19e651cd1c3e783a4614ff49f9f624\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b953abf0e1905c143f473ea9d09341d7602168ff1350c317346bef8dda9402f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b953abf0e1905c143f473ea9d09341d7602168ff1350c317346bef8dda9402f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7627aff65d1bc4a39148f098ac2e1676d41c980ad3dd2d9494f4bee7b4c0872\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7627aff65d1bc4a39148f098ac2e1676d41c980ad3dd2d9494f4bee7b4c0872\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8pwbb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:20Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.955235 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zpkw4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ed53a3-7ee5-4d66-9e47-be49a9cd1b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71f9a74961573b2aafe3dce854836f51b027ee6abfb12bc2ff57be6524979165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqqjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zpkw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:20Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.971299 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2dq9w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"99a5c711-5c13-4615-93fc-9fbf02ce54ca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rq68g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rq68g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:20Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2dq9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:20Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.984517 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-85f5z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f536e7a4-ad53-442e-b7c3-8928fcd89f22\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://180e166e57042a6843516cf424020e8aab91131f7e0cc4c6f061de5e46bb0f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r72s7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-85f5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:20Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:20 crc kubenswrapper[4728]: I1205 11:08:20.997538 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:20Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.011868 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec1c7fc1e4ad2be5a7dfb1eeaa3def2a68d041e302643e875869f6c657c5f6d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:21Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.021281 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.021310 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.021319 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.021333 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.021342 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:21Z","lastTransitionTime":"2025-12-05T11:08:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.043373 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62ccfb6861d12cec3082bfe61fc1976dc0889398539797aa62979b632e74c9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82bd221438311b8ba34cfc26e43e590743b257ad91407befb8f4d2e17f7ba55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aacb2f8e3ec748166a587a5c2c7c026382073d9e3b8e20caf4dc444e9cc70c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19041ddafe3e2fe2bb95577c9bc5e372ba443790469deb4f36634d3690a61b0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d488533307095d5c0efc56c8f32bac388226d2d5763c5c31db03c84585899f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02bc0b9bc6ed08d2162af49b31f5e66ef70f78723e4016f6be5905d1227201b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a80880d77c415df6c5a75840e5c7a8bc26c4191
3ea49d22fef48ecb50beda8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3211044523f003ebc4affc775f3a904936bda90c1502fc2f9a2b5f2bd7f9155\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T11:08:19Z\\\",\\\"message\\\":\\\"g reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1205 11:08:17.754182 5987 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 11:08:17.754368 5987 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 11:08:17.754604 5987 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 11:08:17.754676 5987 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 11:08:17.754697 5987 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 11:08:17.755040 5987 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 11:08:17.755054 5987 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 11:08:17.755077 5987 factory.go:656] Stopping watch factory\\\\nI1205 11:08:17.755090 5987 ovnkube.go:599] Stopped ovnkube\\\\nI1205 11:08:17.755107 5987 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 
11\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:14Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9be8133cc8e8e518cd8a6a3ace93814e8d565d8bea2b81433ba07281ade2f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wchlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:21Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.065305 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"600a7698-d6af-4d3a-997c-2af7492b676c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4e303bada68d0a42afcecbe481dd6199dd2e545598d65614cee8ce1097fa3da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e0e4eacda19e748d0d3f8e1d76b38c186c2df0f81a171387de0d9016a76c719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c6351e5a4a9441b43ba3145aa818599799ad7fcdd6bf62b5d0ec08eb3a9d5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c60e02416d658dfb8deecf915952febb04b650
126edae08c8bf522c50524c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://489eff9110c6e48b0e3119edf09adf0a2a48fc1fdc1bb8e81f113ab882725eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:21Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.081791 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90beec388d3137d02789bb6be800fbb0b1678f8799f133d1e48b1f636f70e8c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://780e71361f1592490b1bbc77b9a869204d3a20319d1bd6c8330d3b0fc81d197f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:21Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.098748 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:21Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.116328 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:21Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.124072 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.124103 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.124111 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.124124 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.124133 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:21Z","lastTransitionTime":"2025-12-05T11:08:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.130499 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://098ed82d50e963f31d1176e06249004a4cc94258aab5dabf6628dead209ae3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:21Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.141759 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gf8np" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f292da29-a632-47aa-8bcc-2d999eaa6c11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d525f104a98170107ecc1ae96c4f5c5ff3dda2f976336c219e5a9498725380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dxmch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gf8np\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:21Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.153062 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bfa60b-fcb6-4519-abc5-c25fea50921d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d952894ef1ae2b5d6c23b0d1b85d9d2689e062674a2e4ba53719b6c7290bbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41cccfccd3b8763566f9a4453832461a74aedcf7bcc18e8c925bb20e1b620de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-w8qlp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:21Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.168622 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8pwbb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e18c7d32-4ecb-4931-931e-56a7898cb233\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d53da0e296e545d13941882c9568c12f967a274b906dafb14c0905f558a155c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\
",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f79805a42a1054302447bc55dc07df80cf19e651cd1c3e783a4614ff49f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"
containerID\\\":\\\"cri-o://f79805a42a1054302447bc55dc07df80cf19e651cd1c3e783a4614ff49f9f624\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b953abf0e1905c143f473ea9d09341d7602168ff1350c317346bef8dda9402f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b953abf0e1905c143f473ea9d09341d7602168ff1350c317346bef8dda9402f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7627aff65d1bc4a39148f098ac2e1676d41c980ad3dd2d9494f4bee7b4c0872\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7627aff65d1bc4a39148f098ac2e1676d41c980ad3dd2d9494f4bee7b4c0872\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8pwbb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:21Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.182081 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-n2qgk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f26e1600-9bcd-497a-b875-7eaed5b6fba8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b47033c6c217ce37f789d031e18e21eeb86ba85f574c6455e15c79a25bfeaecc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fqbq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6b426a3d72e8498cd10b4085d89ffa51beaa52d2dbda7c70c96769e32a20d27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fqbq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\
\"2025-12-05T11:08:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-n2qgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:21Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.195567 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4abe5ae7-e1e1-4771-bb01-4e62b10e074b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe411cb87a46bdaf8208ebd4162ff18f2f9a2617ca43eb635793364573eb3ea4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ca6321022609b3cd60a9e1a534cfd3e69d3f520e5252ed4ffa8f76be4af91e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ccc55c28cdfadaa191738047a8d7223cf102d79a9af4d225562a87c42cf1c8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ff
ac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1665078c543b335af45e2ef37840dff719d513125a747f8c07dc74ef97e07354\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:21Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.211817 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6b1dbb0-8a99-4b3b-870e-771cdaac1bac\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://adb81cae56872a55c5781abe457653330ce2284251d64366ece5f2f05055b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ab41915fb3de0296bc2ba49fe39835620c1ed216790add23ccc2c23ac718c2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://920da764fdc49312fdf906f31a1280028d5762b4b7af2b3806c9488e3dfa9d71\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd145f7ac78fc008f9b9b24a11f51d6afc11e3cfa047b8d178d11507ddde8e77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b05c55e24f53fc91a2fd2b2a34b000751799933c3cb56da1ea666a6bfb6ba13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T11:08:05Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 11:07:59.827917 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 11:07:59.829563 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2883091425/tls.crt::/tmp/serving-cert-2883091425/tls.key\\\\\\\"\\\\nI1205 11:08:05.319025 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 11:08:05.321746 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 11:08:05.321774 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 11:08:05.321847 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 11:08:05.321860 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 11:08:05.327281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 11:08:05.327310 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 11:08:05.327326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 11:08:05.327329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 11:08:05.327332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 11:08:05.327493 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 11:08:05.328771 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af8363ba45c19a07e7d11064c2e321ec28b939288d01eb82d8bd9e1ee6b93998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:21Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.231626 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.231677 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.231691 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.231710 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.231723 4728 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:21Z","lastTransitionTime":"2025-12-05T11:08:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.232422 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-85f5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f536e7a4-ad53-442e-b7c3-8928fcd89f22\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://180e166e57042a6843516cf424020e8aab91131f7e0cc4c6f061de5e46bb0f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r72s7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-85f5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:21Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.243929 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:21Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.260548 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec1c7fc1e4ad2be5a7dfb1eeaa3def2a68d041e302643e875869f6c657c5f6d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:21Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.288224 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62ccfb6861d12cec3082bfe61fc1976dc0889398539797aa62979b632e74c9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82bd221438311b8ba34cfc26e43e590743b257ad91407befb8f4d2e17f7ba55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aacb2f8e3ec748166a587a5c2c7c026382073d9e3b8e20caf4dc444e9cc70c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19041ddafe3e2fe2bb95577c9bc5e372ba443790469deb4f36634d3690a61b0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d488533307095d5c0efc56c8f32bac388226d2d5763c5c31db03c84585899f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02bc0b9bc6ed08d2162af49b31f5e66ef70f78723e4016f6be5905d1227201b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a80880d77c415df6c5a75840e5c7a8bc26c4191
3ea49d22fef48ecb50beda8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3211044523f003ebc4affc775f3a904936bda90c1502fc2f9a2b5f2bd7f9155\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T11:08:19Z\\\",\\\"message\\\":\\\"g reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1205 11:08:17.754182 5987 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 11:08:17.754368 5987 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 11:08:17.754604 5987 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 11:08:17.754676 5987 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 11:08:17.754697 5987 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 11:08:17.755040 5987 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 11:08:17.755054 5987 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 11:08:17.755077 5987 factory.go:656] Stopping watch factory\\\\nI1205 11:08:17.755090 5987 ovnkube.go:599] Stopped ovnkube\\\\nI1205 11:08:17.755107 5987 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 
11\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:14Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9be8133cc8e8e518cd8a6a3ace93814e8d565d8bea2b81433ba07281ade2f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wchlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:21Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.303749 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zpkw4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ed53a3-7ee5-4d66-9e47-be49a9cd1b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71f9a74961573b2aafe3dce854836f51b027ee6abfb12bc2ff57be6524979165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqqjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zpkw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:21Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.317154 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2dq9w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"99a5c711-5c13-4615-93fc-9fbf02ce54ca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rq68g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rq68g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:20Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2dq9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:21Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.334478 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:21Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.334933 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.335019 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.335035 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.335059 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.335075 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:21Z","lastTransitionTime":"2025-12-05T11:08:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.351182 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.351181 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:21Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:21 crc kubenswrapper[4728]: E1205 11:08:21.351344 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.374531 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"600a7698-d6af-4d3a-997c-2af7492b676c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4e303bada68d0a42afcecbe481dd6199dd2e545598d65614cee8ce1097fa3da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e0e4eacda19e748d0d3f8e1d76b38c186c2df0f81a171387de0d9016a76c719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c6351e5a4a9441b43ba3145aa818599799ad7fcdd6bf62b5d0ec08eb3a9d5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"
startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c60e02416d658dfb8deecf915952febb04b650126edae08c8bf522c50524c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://489eff9110c6e48b0e3119edf09adf0a2a48fc1fdc1bb8e81f113ab882725eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b
8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:21Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.394120 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90beec388d3137d02789bb6be800fbb0b1678f8799f133d1e48b1f636f70e8c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://780e71361f1592490b1bbc77b9a869204d3a20319d1bd6c8330d3b0fc81d197f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:21Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.437864 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.438203 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.438215 4728 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.438234 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.438246 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:21Z","lastTransitionTime":"2025-12-05T11:08:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.541149 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.541193 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.541204 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.541224 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.541237 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:21Z","lastTransitionTime":"2025-12-05T11:08:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.643922 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.643964 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.643975 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.643991 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.644000 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:21Z","lastTransitionTime":"2025-12-05T11:08:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.746787 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.746877 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.746893 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.746916 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.746931 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:21Z","lastTransitionTime":"2025-12-05T11:08:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.772855 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/99a5c711-5c13-4615-93fc-9fbf02ce54ca-metrics-certs\") pod \"network-metrics-daemon-2dq9w\" (UID: \"99a5c711-5c13-4615-93fc-9fbf02ce54ca\") " pod="openshift-multus/network-metrics-daemon-2dq9w" Dec 05 11:08:21 crc kubenswrapper[4728]: E1205 11:08:21.773079 4728 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 11:08:21 crc kubenswrapper[4728]: E1205 11:08:21.773188 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/99a5c711-5c13-4615-93fc-9fbf02ce54ca-metrics-certs podName:99a5c711-5c13-4615-93fc-9fbf02ce54ca nodeName:}" failed. No retries permitted until 2025-12-05 11:08:23.77316244 +0000 UTC m=+37.915285183 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/99a5c711-5c13-4615-93fc-9fbf02ce54ca-metrics-certs") pod "network-metrics-daemon-2dq9w" (UID: "99a5c711-5c13-4615-93fc-9fbf02ce54ca") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.849090 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.849115 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.849125 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.849141 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.849150 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:21Z","lastTransitionTime":"2025-12-05T11:08:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.849464 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-wchlf_1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5/ovnkube-controller/1.log" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.850040 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-wchlf_1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5/ovnkube-controller/0.log" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.852155 4728 generic.go:334] "Generic (PLEG): container finished" podID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" containerID="2a80880d77c415df6c5a75840e5c7a8bc26c41913ea49d22fef48ecb50beda8e" exitCode=1 Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.852232 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" event={"ID":"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5","Type":"ContainerDied","Data":"2a80880d77c415df6c5a75840e5c7a8bc26c41913ea49d22fef48ecb50beda8e"} Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.852280 4728 scope.go:117] "RemoveContainer" containerID="a3211044523f003ebc4affc775f3a904936bda90c1502fc2f9a2b5f2bd7f9155" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.853081 4728 scope.go:117] "RemoveContainer" containerID="2a80880d77c415df6c5a75840e5c7a8bc26c41913ea49d22fef48ecb50beda8e" Dec 05 11:08:21 crc kubenswrapper[4728]: E1205 11:08:21.853286 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-wchlf_openshift-ovn-kubernetes(1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5)\"" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" podUID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.865419 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-85f5z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f536e7a4-ad53-442e-b7c3-8928fcd89f22\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://180e166e57042a6843516cf424020e8aab91131f7e0cc4c6f061de5e46bb0f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r72s7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-85f5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:21Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.873714 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.873843 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.873883 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: 
\"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:08:21 crc kubenswrapper[4728]: E1205 11:08:21.873951 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:08:37.873922511 +0000 UTC m=+52.016045244 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:08:21 crc kubenswrapper[4728]: E1205 11:08:21.873952 4728 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 11:08:21 crc kubenswrapper[4728]: E1205 11:08:21.873984 4728 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 11:08:21 crc kubenswrapper[4728]: E1205 11:08:21.874035 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 11:08:37.874022443 +0000 UTC m=+52.016145146 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 11:08:21 crc kubenswrapper[4728]: E1205 11:08:21.874060 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 11:08:37.874049694 +0000 UTC m=+52.016172487 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.882038 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:21Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.899019 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec1c7fc1e4ad2be5a7dfb1eeaa3def2a68d041e302643e875869f6c657c5f6d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:21Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.929156 4728 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62ccfb6861d12cec3082bfe61fc1976dc0889398539797aa62979b632e74c9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82bd221438311b8ba34cfc26e43e590743b257ad91407befb8f4d2e17f7ba55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aacb2f8e3ec748166a587a5c2c7c026382073d9e3b8e20caf4dc444e9cc70c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36c
dd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19041ddafe3e2fe2bb95577c9bc5e372ba443790469deb4f36634d3690a61b0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d488533307095d5c0efc56c8f32bac388226d2d5763c5c31db03c84585899f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02bc0b9bc6ed08d2162af49b31f5e66ef70f78723e4016f6be5905d1227201b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-con
troller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a80880d77c415df6c5a75840e5c7a8bc26c41913ea49d22fef48ecb50beda8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3211044523f003ebc4affc775f3a904936bda90c1502fc2f9a2b5f2bd7f9155\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T11:08:19Z\\\",\\\"message\\\":\\\"g reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1205 11:08:17.754182 5987 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 11:08:17.754368 5987 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 11:08:17.754604 5987 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 11:08:17.754676 5987 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 11:08:17.754697 5987 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 11:08:17.755040 5987 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 11:08:17.755054 5987 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 11:08:17.755077 5987 factory.go:656] Stopping watch factory\\\\nI1205 11:08:17.755090 5987 ovnkube.go:599] Stopped ovnkube\\\\nI1205 11:08:17.755107 5987 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 11\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:14Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a80880d77c415df6c5a75840e5c7a8bc26c41913ea49d22fef48ecb50beda8e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T11:08:21Z\\\",\\\"message\\\":\\\"[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == 
{43933d5e-3c3b-4ff8-8926-04ac25de450e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:43933d5e-3c3b-4ff8-8926-04ac25de450e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 11:08:20.634876 6208 obj_retry.go:365] Adding new object: *v1.Pod openshift-image-registry/node-ca-zpkw4\\\\nF1205 11:08:20.635951 6208 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cr
i-o://b9be8133cc8e8e518cd8a6a3ace93814e8d565d8bea2b81433ba07281ade2f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wchlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:21Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.942191 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zpkw4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ed53a3-7ee5-4d66-9e47-be49a9cd1b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71f9a74961573b2aafe3dce854836f51b027ee6abfb12bc2ff57be6524979165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqqjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zpkw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:21Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.951073 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.951339 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.951408 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.951581 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.951669 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:21Z","lastTransitionTime":"2025-12-05T11:08:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.955213 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2dq9w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"99a5c711-5c13-4615-93fc-9fbf02ce54ca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rq68g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rq68g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:20Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2dq9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:21Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.968671 4728 status_manager.go:875] "Failed to update 
status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:21Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.974921 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.975033 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:08:21 crc kubenswrapper[4728]: E1205 11:08:21.975504 4728 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 11:08:21 crc kubenswrapper[4728]: E1205 11:08:21.975544 4728 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object 
"openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 11:08:21 crc kubenswrapper[4728]: E1205 11:08:21.975560 4728 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 11:08:21 crc kubenswrapper[4728]: E1205 11:08:21.975622 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 11:08:37.975602233 +0000 UTC m=+52.117724966 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 11:08:21 crc kubenswrapper[4728]: E1205 11:08:21.975781 4728 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 11:08:21 crc kubenswrapper[4728]: E1205 11:08:21.975898 4728 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 11:08:21 crc kubenswrapper[4728]: E1205 11:08:21.975963 4728 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 11:08:21 crc kubenswrapper[4728]: E1205 11:08:21.976062 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 11:08:37.976048744 +0000 UTC m=+52.118171437 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 11:08:21 crc kubenswrapper[4728]: I1205 11:08:21.981352 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:21Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.003258 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"600a7698-d6af-4d3a-997c-2af7492b676c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4e303bada68d0a42afcecbe481dd6199dd2e545598d65614cee8ce1097fa3da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e0e4eacda19e748d0d3f8e1d76b38c186c2df0f81a171387de0d9016a76c719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"vol
umeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c6351e5a4a9441b43ba3145aa818599799ad7fcdd6bf62b5d0ec08eb3a9d5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c60e02416d658dfb8deecf915952febb04b650126edae08c8bf522c50524c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://489eff9110c6e48b0e3119edf09adf0a2a48fc1fdc1bb8e81f113ab882725eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"exitCode\\\":0,\\
\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:22Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.017701 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90beec388d3137d02789bb6be800fbb0b1678f8799f133d1e48b1f636f70e8c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://780e71361f1592490b1bbc77b9a869204d3a20319d1bd6c8330d3b0fc81d197f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:22Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.032276 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://098ed82d50e963f31d1176e06249004a4cc94258aab5dabf6628dead209ae3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:22Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.049507 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gf8np" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f292da29-a632-47aa-8bcc-2d999eaa6c11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d525f104a98170107ecc1ae96c4f5c5ff3dda2f976336c219e5a9498725380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dxmch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gf8np\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:22Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.053746 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.053835 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.053853 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.053875 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.053891 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:22Z","lastTransitionTime":"2025-12-05T11:08:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.065370 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bfa60b-fcb6-4519-abc5-c25fea50921d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d952894ef1ae2b5d6c23b0d1b85d9d2689e062674a2e4ba53719b6c7290bbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41cccfccd3b8763566f9a4453832461a74aedcf7bcc18e8c925bb20e1b620de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w8qlp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:22Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.086095 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8pwbb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e18c7d32-4ecb-4931-931e-56a7898cb233\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d53da0e296e545d13941882c9568c12f967a274b906dafb14c0905f558a155c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f79805a42a1054302447bc55dc07df80cf19e651cd1c3e783a4614ff49f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f79805a42a1054302447bc55dc07df80cf19e651cd1c3e783a4614ff49f9f624\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b953abf0e1905c143f473ea9d09341d7602168ff1350c317346bef8dda9402f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b953abf0e1905c143f473ea9d09341d7602168ff1350c317346bef8dda9402f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7627aff65d1bc4a39148f098ac2e1676d41c980ad3dd2d9494f4bee7b4c0872\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7627aff65d1bc4a39148f098ac2e1676d41c980ad3dd2d9494f4bee7b4c0872\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8pwbb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:22Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.102333 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-n2qgk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f26e1600-9bcd-497a-b875-7eaed5b6fba8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b47033c6c217ce37f789d031e18e21eeb86ba85f574c6455e15c79a25bfeaecc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fqbq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6b426a3d72e8498cd10b4085d89ffa51beaa52d2dbda7c70c96769e32a20d27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fqbq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-n2qgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:22Z is after 2025-08-24T17:21:41Z" Dec 05 
11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.117225 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4abe5ae7-e1e1-4771-bb01-4e62b10e074b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe411cb87a46bdaf8208ebd4162ff18f2f9a2617ca43eb635793364573eb3ea4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ca6321022609b3cd60a9e1a534cfd3e69d3f520e5252ed4ffa8f76be4af91e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ccc55c28cdfadaa191738047a8d7223cf102d79a9af4d225562a87c42cf1c8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1665078c543b335af45e2ef37840dff719d513125a747f8c07dc74ef97e07354\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:22Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.133260 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6b1dbb0-8a99-4b3b-870e-771cdaac1bac\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://adb81cae56872a55c5781abe457653330ce2284251d64366ece5f2f05055b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"contain
erID\\\":\\\"cri-o://d8ab41915fb3de0296bc2ba49fe39835620c1ed216790add23ccc2c23ac718c2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://920da764fdc49312fdf906f31a1280028d5762b4b7af2b3806c9488e3dfa9d71\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd145f7ac78fc008f9b9b24a11f51d6afc11e3cfa047b8d178d11507ddde8e77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b05c55e24f53fc91a2fd2b2a34b000751799933c3cb56da1ea666a6bfb6ba13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T11:08:05Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 11:07:59.827917 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 11:07:59.829563 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2883091425/tls.crt::/tmp/serving-cert-2883091425/tls.key\\\\\\\"\\\\nI1205 11:08:05.319025 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 11:08:05.321746 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 11:08:05.321774 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 11:08:05.321847 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 11:08:05.321860 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 11:08:05.327281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 11:08:05.327310 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' 
detected.\\\\nW1205 11:08:05.327316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 11:08:05.327326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 11:08:05.327329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 11:08:05.327332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 11:08:05.327493 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 11:08:05.328771 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af8363ba45c19a07e7d11064c2e321ec28b939288d01eb82d8bd9e1ee6b93998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:22Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:22 crc 
kubenswrapper[4728]: I1205 11:08:22.156314 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.156389 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.156408 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.156427 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.156441 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:22Z","lastTransitionTime":"2025-12-05T11:08:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.259408 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.259484 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.259503 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.259535 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.259560 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:22Z","lastTransitionTime":"2025-12-05T11:08:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.351564 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.351608 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2dq9w" Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.351576 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:08:22 crc kubenswrapper[4728]: E1205 11:08:22.351716 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 11:08:22 crc kubenswrapper[4728]: E1205 11:08:22.351785 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 11:08:22 crc kubenswrapper[4728]: E1205 11:08:22.351873 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2dq9w" podUID="99a5c711-5c13-4615-93fc-9fbf02ce54ca" Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.361498 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.361673 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.361753 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.361844 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.361919 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:22Z","lastTransitionTime":"2025-12-05T11:08:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.463945 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.463992 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.464009 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.464028 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.464043 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:22Z","lastTransitionTime":"2025-12-05T11:08:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.566425 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.566494 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.566506 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.566545 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.566559 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:22Z","lastTransitionTime":"2025-12-05T11:08:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.670110 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.670183 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.670207 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.670262 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.670284 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:22Z","lastTransitionTime":"2025-12-05T11:08:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.773330 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.773375 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.773389 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.773406 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.773420 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:22Z","lastTransitionTime":"2025-12-05T11:08:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.858841 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-wchlf_1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5/ovnkube-controller/1.log" Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.876510 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.876588 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.876613 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.876640 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.876662 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:22Z","lastTransitionTime":"2025-12-05T11:08:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.979988 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.980041 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.980052 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.980071 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:22 crc kubenswrapper[4728]: I1205 11:08:22.980082 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:22Z","lastTransitionTime":"2025-12-05T11:08:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:23 crc kubenswrapper[4728]: I1205 11:08:23.083075 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:23 crc kubenswrapper[4728]: I1205 11:08:23.083126 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:23 crc kubenswrapper[4728]: I1205 11:08:23.083135 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:23 crc kubenswrapper[4728]: I1205 11:08:23.083148 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:23 crc kubenswrapper[4728]: I1205 11:08:23.083159 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:23Z","lastTransitionTime":"2025-12-05T11:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:23 crc kubenswrapper[4728]: I1205 11:08:23.185726 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:23 crc kubenswrapper[4728]: I1205 11:08:23.185837 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:23 crc kubenswrapper[4728]: I1205 11:08:23.185856 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:23 crc kubenswrapper[4728]: I1205 11:08:23.185881 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:23 crc kubenswrapper[4728]: I1205 11:08:23.185898 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:23Z","lastTransitionTime":"2025-12-05T11:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:23 crc kubenswrapper[4728]: I1205 11:08:23.289344 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:23 crc kubenswrapper[4728]: I1205 11:08:23.289408 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:23 crc kubenswrapper[4728]: I1205 11:08:23.289430 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:23 crc kubenswrapper[4728]: I1205 11:08:23.289456 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:23 crc kubenswrapper[4728]: I1205 11:08:23.289476 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:23Z","lastTransitionTime":"2025-12-05T11:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:23 crc kubenswrapper[4728]: I1205 11:08:23.352018 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:08:23 crc kubenswrapper[4728]: E1205 11:08:23.352240 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 11:08:23 crc kubenswrapper[4728]: I1205 11:08:23.392619 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:23 crc kubenswrapper[4728]: I1205 11:08:23.392695 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:23 crc kubenswrapper[4728]: I1205 11:08:23.392718 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:23 crc kubenswrapper[4728]: I1205 11:08:23.392749 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:23 crc kubenswrapper[4728]: I1205 11:08:23.392769 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:23Z","lastTransitionTime":"2025-12-05T11:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:23 crc kubenswrapper[4728]: I1205 11:08:23.495777 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:23 crc kubenswrapper[4728]: I1205 11:08:23.495861 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:23 crc kubenswrapper[4728]: I1205 11:08:23.495882 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:23 crc kubenswrapper[4728]: I1205 11:08:23.495958 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:23 crc kubenswrapper[4728]: I1205 11:08:23.495996 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:23Z","lastTransitionTime":"2025-12-05T11:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:23 crc kubenswrapper[4728]: I1205 11:08:23.598413 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:23 crc kubenswrapper[4728]: I1205 11:08:23.598488 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:23 crc kubenswrapper[4728]: I1205 11:08:23.598507 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:23 crc kubenswrapper[4728]: I1205 11:08:23.598533 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:23 crc kubenswrapper[4728]: I1205 11:08:23.598550 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:23Z","lastTransitionTime":"2025-12-05T11:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:23 crc kubenswrapper[4728]: I1205 11:08:23.702053 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:23 crc kubenswrapper[4728]: I1205 11:08:23.702103 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:23 crc kubenswrapper[4728]: I1205 11:08:23.702115 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:23 crc kubenswrapper[4728]: I1205 11:08:23.702133 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:23 crc kubenswrapper[4728]: I1205 11:08:23.702144 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:23Z","lastTransitionTime":"2025-12-05T11:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:23 crc kubenswrapper[4728]: I1205 11:08:23.794250 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/99a5c711-5c13-4615-93fc-9fbf02ce54ca-metrics-certs\") pod \"network-metrics-daemon-2dq9w\" (UID: \"99a5c711-5c13-4615-93fc-9fbf02ce54ca\") " pod="openshift-multus/network-metrics-daemon-2dq9w" Dec 05 11:08:23 crc kubenswrapper[4728]: E1205 11:08:23.794423 4728 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 11:08:23 crc kubenswrapper[4728]: E1205 11:08:23.794527 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/99a5c711-5c13-4615-93fc-9fbf02ce54ca-metrics-certs podName:99a5c711-5c13-4615-93fc-9fbf02ce54ca nodeName:}" failed. No retries permitted until 2025-12-05 11:08:27.794502461 +0000 UTC m=+41.936625184 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/99a5c711-5c13-4615-93fc-9fbf02ce54ca-metrics-certs") pod "network-metrics-daemon-2dq9w" (UID: "99a5c711-5c13-4615-93fc-9fbf02ce54ca") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 11:08:23 crc kubenswrapper[4728]: I1205 11:08:23.805080 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:23 crc kubenswrapper[4728]: I1205 11:08:23.805180 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:23 crc kubenswrapper[4728]: I1205 11:08:23.805201 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:23 crc kubenswrapper[4728]: I1205 11:08:23.805264 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:23 crc kubenswrapper[4728]: I1205 11:08:23.805281 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:23Z","lastTransitionTime":"2025-12-05T11:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:23 crc kubenswrapper[4728]: I1205 11:08:23.908362 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:23 crc kubenswrapper[4728]: I1205 11:08:23.908421 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:23 crc kubenswrapper[4728]: I1205 11:08:23.908433 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:23 crc kubenswrapper[4728]: I1205 11:08:23.908453 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:23 crc kubenswrapper[4728]: I1205 11:08:23.908465 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:23Z","lastTransitionTime":"2025-12-05T11:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:24 crc kubenswrapper[4728]: I1205 11:08:24.010718 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:24 crc kubenswrapper[4728]: I1205 11:08:24.010807 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:24 crc kubenswrapper[4728]: I1205 11:08:24.010820 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:24 crc kubenswrapper[4728]: I1205 11:08:24.010837 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:24 crc kubenswrapper[4728]: I1205 11:08:24.010851 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:24Z","lastTransitionTime":"2025-12-05T11:08:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:24 crc kubenswrapper[4728]: I1205 11:08:24.113512 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:24 crc kubenswrapper[4728]: I1205 11:08:24.113597 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:24 crc kubenswrapper[4728]: I1205 11:08:24.113630 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:24 crc kubenswrapper[4728]: I1205 11:08:24.113661 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:24 crc kubenswrapper[4728]: I1205 11:08:24.113681 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:24Z","lastTransitionTime":"2025-12-05T11:08:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:24 crc kubenswrapper[4728]: I1205 11:08:24.218682 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:24 crc kubenswrapper[4728]: I1205 11:08:24.218741 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:24 crc kubenswrapper[4728]: I1205 11:08:24.218751 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:24 crc kubenswrapper[4728]: I1205 11:08:24.218769 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:24 crc kubenswrapper[4728]: I1205 11:08:24.218784 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:24Z","lastTransitionTime":"2025-12-05T11:08:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:24 crc kubenswrapper[4728]: I1205 11:08:24.322074 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:24 crc kubenswrapper[4728]: I1205 11:08:24.322135 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:24 crc kubenswrapper[4728]: I1205 11:08:24.322152 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:24 crc kubenswrapper[4728]: I1205 11:08:24.322177 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:24 crc kubenswrapper[4728]: I1205 11:08:24.322194 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:24Z","lastTransitionTime":"2025-12-05T11:08:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:24 crc kubenswrapper[4728]: I1205 11:08:24.351536 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:08:24 crc kubenswrapper[4728]: I1205 11:08:24.351646 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2dq9w" Dec 05 11:08:24 crc kubenswrapper[4728]: I1205 11:08:24.351707 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:08:24 crc kubenswrapper[4728]: E1205 11:08:24.352003 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 11:08:24 crc kubenswrapper[4728]: E1205 11:08:24.352139 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2dq9w" podUID="99a5c711-5c13-4615-93fc-9fbf02ce54ca" Dec 05 11:08:24 crc kubenswrapper[4728]: E1205 11:08:24.352264 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 11:08:24 crc kubenswrapper[4728]: I1205 11:08:24.425255 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:24 crc kubenswrapper[4728]: I1205 11:08:24.425316 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:24 crc kubenswrapper[4728]: I1205 11:08:24.425334 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:24 crc kubenswrapper[4728]: I1205 11:08:24.425358 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:24 crc kubenswrapper[4728]: I1205 11:08:24.425377 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:24Z","lastTransitionTime":"2025-12-05T11:08:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:24 crc kubenswrapper[4728]: I1205 11:08:24.527729 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:24 crc kubenswrapper[4728]: I1205 11:08:24.527774 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:24 crc kubenswrapper[4728]: I1205 11:08:24.527782 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:24 crc kubenswrapper[4728]: I1205 11:08:24.527827 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:24 crc kubenswrapper[4728]: I1205 11:08:24.527839 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:24Z","lastTransitionTime":"2025-12-05T11:08:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:24 crc kubenswrapper[4728]: I1205 11:08:24.630851 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:24 crc kubenswrapper[4728]: I1205 11:08:24.630920 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:24 crc kubenswrapper[4728]: I1205 11:08:24.630938 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:24 crc kubenswrapper[4728]: I1205 11:08:24.630966 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:24 crc kubenswrapper[4728]: I1205 11:08:24.630986 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:24Z","lastTransitionTime":"2025-12-05T11:08:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:24 crc kubenswrapper[4728]: I1205 11:08:24.734065 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:24 crc kubenswrapper[4728]: I1205 11:08:24.734126 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:24 crc kubenswrapper[4728]: I1205 11:08:24.734134 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:24 crc kubenswrapper[4728]: I1205 11:08:24.734148 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:24 crc kubenswrapper[4728]: I1205 11:08:24.734157 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:24Z","lastTransitionTime":"2025-12-05T11:08:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:24 crc kubenswrapper[4728]: I1205 11:08:24.837074 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:24 crc kubenswrapper[4728]: I1205 11:08:24.837146 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:24 crc kubenswrapper[4728]: I1205 11:08:24.837169 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:24 crc kubenswrapper[4728]: I1205 11:08:24.837200 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:24 crc kubenswrapper[4728]: I1205 11:08:24.837221 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:24Z","lastTransitionTime":"2025-12-05T11:08:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:24 crc kubenswrapper[4728]: I1205 11:08:24.940531 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:24 crc kubenswrapper[4728]: I1205 11:08:24.940578 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:24 crc kubenswrapper[4728]: I1205 11:08:24.940591 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:24 crc kubenswrapper[4728]: I1205 11:08:24.940608 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:24 crc kubenswrapper[4728]: I1205 11:08:24.940619 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:24Z","lastTransitionTime":"2025-12-05T11:08:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:25 crc kubenswrapper[4728]: I1205 11:08:25.043448 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:25 crc kubenswrapper[4728]: I1205 11:08:25.043505 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:25 crc kubenswrapper[4728]: I1205 11:08:25.043520 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:25 crc kubenswrapper[4728]: I1205 11:08:25.043540 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:25 crc kubenswrapper[4728]: I1205 11:08:25.043556 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:25Z","lastTransitionTime":"2025-12-05T11:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:25 crc kubenswrapper[4728]: I1205 11:08:25.146782 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:25 crc kubenswrapper[4728]: I1205 11:08:25.146835 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:25 crc kubenswrapper[4728]: I1205 11:08:25.146843 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:25 crc kubenswrapper[4728]: I1205 11:08:25.146856 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:25 crc kubenswrapper[4728]: I1205 11:08:25.146864 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:25Z","lastTransitionTime":"2025-12-05T11:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:25 crc kubenswrapper[4728]: I1205 11:08:25.249671 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:25 crc kubenswrapper[4728]: I1205 11:08:25.249710 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:25 crc kubenswrapper[4728]: I1205 11:08:25.249753 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:25 crc kubenswrapper[4728]: I1205 11:08:25.249773 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:25 crc kubenswrapper[4728]: I1205 11:08:25.249787 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:25Z","lastTransitionTime":"2025-12-05T11:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:25 crc kubenswrapper[4728]: I1205 11:08:25.351017 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:08:25 crc kubenswrapper[4728]: E1205 11:08:25.351222 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 11:08:25 crc kubenswrapper[4728]: I1205 11:08:25.351992 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:25 crc kubenswrapper[4728]: I1205 11:08:25.352049 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:25 crc kubenswrapper[4728]: I1205 11:08:25.352065 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:25 crc kubenswrapper[4728]: I1205 11:08:25.352089 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:25 crc kubenswrapper[4728]: I1205 11:08:25.352105 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:25Z","lastTransitionTime":"2025-12-05T11:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 05 11:08:25 crc kubenswrapper[4728]: I1205 11:08:25.455902 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:25 crc kubenswrapper[4728]: I1205 11:08:25.455948 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:25 crc kubenswrapper[4728]: I1205 11:08:25.455959 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:25 crc kubenswrapper[4728]: I1205 11:08:25.455977 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:25 crc kubenswrapper[4728]: I1205 11:08:25.455990 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:25Z","lastTransitionTime":"2025-12-05T11:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:25 crc kubenswrapper[4728]: I1205 11:08:25.559104 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:25 crc kubenswrapper[4728]: I1205 11:08:25.559147 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:25 crc kubenswrapper[4728]: I1205 11:08:25.559158 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:25 crc kubenswrapper[4728]: I1205 11:08:25.559176 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:25 crc kubenswrapper[4728]: I1205 11:08:25.559190 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:25Z","lastTransitionTime":"2025-12-05T11:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:25 crc kubenswrapper[4728]: I1205 11:08:25.662509 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:25 crc kubenswrapper[4728]: I1205 11:08:25.662570 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:25 crc kubenswrapper[4728]: I1205 11:08:25.662587 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:25 crc kubenswrapper[4728]: I1205 11:08:25.662611 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:25 crc kubenswrapper[4728]: I1205 11:08:25.662629 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:25Z","lastTransitionTime":"2025-12-05T11:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:25 crc kubenswrapper[4728]: I1205 11:08:25.765848 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:25 crc kubenswrapper[4728]: I1205 11:08:25.765927 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:25 crc kubenswrapper[4728]: I1205 11:08:25.765952 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:25 crc kubenswrapper[4728]: I1205 11:08:25.765982 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:25 crc kubenswrapper[4728]: I1205 11:08:25.766000 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:25Z","lastTransitionTime":"2025-12-05T11:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:25 crc kubenswrapper[4728]: I1205 11:08:25.868939 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:25 crc kubenswrapper[4728]: I1205 11:08:25.868979 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:25 crc kubenswrapper[4728]: I1205 11:08:25.868989 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:25 crc kubenswrapper[4728]: I1205 11:08:25.869007 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:25 crc kubenswrapper[4728]: I1205 11:08:25.869017 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:25Z","lastTransitionTime":"2025-12-05T11:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:25 crc kubenswrapper[4728]: I1205 11:08:25.971015 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:25 crc kubenswrapper[4728]: I1205 11:08:25.971046 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:25 crc kubenswrapper[4728]: I1205 11:08:25.971054 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:25 crc kubenswrapper[4728]: I1205 11:08:25.971066 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:25 crc kubenswrapper[4728]: I1205 11:08:25.971075 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:25Z","lastTransitionTime":"2025-12-05T11:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.074177 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.074273 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.074296 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.074330 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.074362 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:26Z","lastTransitionTime":"2025-12-05T11:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.177212 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.177268 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.177281 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.177300 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.177314 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:26Z","lastTransitionTime":"2025-12-05T11:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.280600 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.280690 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.280702 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.280727 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.280739 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:26Z","lastTransitionTime":"2025-12-05T11:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.351346 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 11:08:26 crc kubenswrapper[4728]: E1205 11:08:26.351502 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.351535 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 11:08:26 crc kubenswrapper[4728]: E1205 11:08:26.351655 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.351682 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2dq9w"
Dec 05 11:08:26 crc kubenswrapper[4728]: E1205 11:08:26.351759 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2dq9w" podUID="99a5c711-5c13-4615-93fc-9fbf02ce54ca"
Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.371601 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:26Z is after 2025-08-24T17:21:41Z"
Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.382671 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.382722 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.382737 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.382758 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.382773 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:26Z","lastTransitionTime":"2025-12-05T11:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.386503 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec1c7fc1e4ad2be5a7dfb1eeaa3def2a68d041e302643e875869f6c657c5f6d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:26Z is after 2025-08-24T17:21:41Z"
Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.410149 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62ccfb6861d12cec3082bfe61fc1976dc0889398539797aa62979b632e74c9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82bd221438311b8ba34cfc26e43e590743b257ad91407befb8f4d2e17f7ba55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aacb2f8e3ec748166a587a5c2c7c026382073d9e3b8e20caf4dc444e9cc70c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19041ddafe3e2fe2bb95577c9bc5e372ba443790469deb4f36634d3690a61b0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d488533307095d5c0efc56c8f32bac388226d2d5763c5c31db03c84585899f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02bc0b9bc6ed08d2162af49b31f5e66ef70f78723e4016f6be5905d1227201b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a80880d77c415df6c5a75840e5c7a8bc26c41913ea49d22fef48ecb50beda8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3211044523f003ebc4affc775f3a904936bda90c1502fc2f9a2b5f2bd7f9155\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T11:08:19Z\\\",\\\"message\\\":\\\"g reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1205 11:08:17.754182 5987 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 11:08:17.754368 5987 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 11:08:17.754604 5987 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 11:08:17.754676 5987 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 11:08:17.754697 5987 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 11:08:17.755040 5987 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 11:08:17.755054 5987 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 11:08:17.755077 5987 factory.go:656] Stopping watch factory\\\\nI1205 11:08:17.755090 5987 ovnkube.go:599] Stopped ovnkube\\\\nI1205 11:08:17.755107 5987 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 11\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:14Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a80880d77c415df6c5a75840e5c7a8bc26c41913ea49d22fef48ecb50beda8e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T11:08:21Z\\\",\\\"message\\\":\\\"[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {43933d5e-3c3b-4ff8-8926-04ac25de450e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:43933d5e-3c3b-4ff8-8926-04ac25de450e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 11:08:20.634876 6208 obj_retry.go:365] Adding new object: *v1.Pod openshift-image-registry/node-ca-zpkw4\\\\nF1205 11:08:20.635951 6208 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9be8133cc8e8e518cd8a6a3ace93814e8d565d8bea2b81433ba07281ade2f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wchlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:26Z is after 2025-08-24T17:21:41Z"
Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.424006 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zpkw4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ed53a3-7ee5-4d66-9e47-be49a9cd1b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71f9a74961573b2aafe3dce854836f51b027ee6abfb12bc2ff57be6524979165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqqjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zpkw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:26Z is after 2025-08-24T17:21:41Z"
Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.434789 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2dq9w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"99a5c711-5c13-4615-93fc-9fbf02ce54ca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rq68g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rq68g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:20Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2dq9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:26Z is after 2025-08-24T17:21:41Z"
Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.444627 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-85f5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f536e7a4-ad53-442e-b7c3-8928fcd89f22\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://180e166e57042a6843516cf424020e8aab91131f7e0cc4c6f061de5e46bb0f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r72s7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-85f5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:26Z is after 2025-08-24T17:21:41Z"
Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.458303 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:26Z is after 2025-08-24T17:21:41Z"
Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.480596 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"600a7698-d6af-4d3a-997c-2af7492b676c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4e303bada68d0a42afcecbe481dd6199dd2e545598d65614cee8ce1097fa3da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e0e4eacda19e748d0d3f8e1d76b38c186c2df0f81a171387de0d9016a76c719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c6351e5a4a9441b43ba3145aa818599799ad7fcdd6bf62b5d0ec08eb3a9d5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c60e02416d658dfb8deecf915952febb04b650126edae08c8bf522c50524c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://489eff9110c6e48b0e3119edf09adf0a2a48fc1fdc1bb8e81f113ab882725eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:26Z is after 2025-08-24T17:21:41Z"
Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.485176 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.485236 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.485245 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.485258 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.485270 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:26Z","lastTransitionTime":"2025-12-05T11:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.496676 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90beec388d3137d02789bb6be800fbb0b1678f8799f133d1e48b1f636f70e8c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://780e71361f1592490b1bbc77b9a869204d3a20319d1bd6c8330d3b0fc81d197f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:26Z is after 2025-08-24T17:21:41Z"
Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.510447 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:26Z is after 2025-08-24T17:21:41Z"
Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.522834 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gf8np" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f292da29-a632-47aa-8bcc-2d999eaa6c11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d525f104a98170107ecc1ae96c4f5c5ff3dda2f976336c219e5a9498725380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dxmch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gf8np\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:26Z is after 2025-08-24T17:21:41Z"
Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.534812 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bfa60b-fcb6-4519-abc5-c25fea50921d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d952894ef1ae2b5d6c23b0d1b85d9d2689e062674a2e4ba53719b6c7290bbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://
d41cccfccd3b8763566f9a4453832461a74aedcf7bcc18e8c925bb20e1b620de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w8qlp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:26Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.553879 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8pwbb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e18c7d32-4ecb-4931-931e-56a7898cb233\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d53da0e296e545d13941882c9568c12f967a274b906dafb14c0905f558a155c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192
.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\
",\\\"startedAt\\\":\\\"2025-12-05T11:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f79805a42a1054302447bc55dc07df80cf19e651cd1c3e783a4614ff49f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f79805a42a1054302447bc55dc07df80cf19e651cd1c3e783a4614ff49f9f624\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b953abf0e1905c143f473ea9d09341d7602168ff1350c317346bef8dda9402f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b953abf0e1905c143f473ea9d09341d7602168ff1350c317346bef8dda9402f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7627aff65d1bc4a39148f098ac2e1676d41c980ad3dd2d9494f4bee7b4c0872\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState
\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7627aff65d1bc4a39148f098ac2e1676d41c980ad3dd2d9494f4bee7b4c0872\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8pwbb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:26Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.583126 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-n2qgk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f26e1600-9bcd-497a-b875-7eaed5b6fba8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b47033c6c217ce37f789d031e18e21eeb86ba85f574c6455e15c79a25bfeaecc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fqbq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6b426a3d72e8498cd10b4085d89ffa51beaa52d2dbda7c70c967
69e32a20d27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fqbq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-n2qgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:26Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.588969 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.589265 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.589398 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.589531 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.589611 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:26Z","lastTransitionTime":"2025-12-05T11:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.603954 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4abe5ae7-e1e1-4771-bb01-4e62b10e074b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe411cb87a46bdaf8208ebd4162ff18f2f9a2617ca43eb635793364573eb3ea4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ca6321022609b3cd60a9e1a534cfd3e69d3f520e5252ed4ffa8f76be4af91e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ccc55c28cdfadaa191738047a8d7223cf102d79a9af4d225562a87c42cf1c8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1665078c543b335af45e2ef37840dff719d513125a747f8c07dc74ef97e07354\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:26Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.620143 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6b1dbb0-8a99-4b3b-870e-771cdaac1bac\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://adb81cae56872a55c5781abe457653330ce2284251d64366ece5f2f05055b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ab41915fb3de0296bc2ba49fe39835620c1ed216790add23ccc2c23ac718c2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://920da764fdc49312fdf906f31a1280028d5762b4b7af2b3806c9488e3dfa9d71\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd145f7ac78fc008f9b9b24a11f51d6afc11e3cfa047b8d178d11507ddde8e77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b05c55e24f53fc91a2fd2b2a34b000751799933c3cb56da1ea666a6bfb6ba13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T11:08:05Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 11:07:59.827917 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 11:07:59.829563 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2883091425/tls.crt::/tmp/serving-cert-2883091425/tls.key\\\\\\\"\\\\nI1205 11:08:05.319025 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 11:08:05.321746 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 11:08:05.321774 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 11:08:05.321847 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 11:08:05.321860 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 11:08:05.327281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 11:08:05.327310 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 11:08:05.327326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 11:08:05.327329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 11:08:05.327332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 11:08:05.327493 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 11:08:05.328771 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af8363ba45c19a07e7d11064c2e321ec28b939288d01eb82d8bd9e1ee6b93998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:26Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.635300 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://098ed82d50e963f31d1176e06249004a4cc94258aab5dabf6628dead209ae3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:26Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.692073 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.692652 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.692672 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.692686 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.692696 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:26Z","lastTransitionTime":"2025-12-05T11:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.795710 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.795836 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.795853 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.795879 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.795893 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:26Z","lastTransitionTime":"2025-12-05T11:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.898073 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.898157 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.898172 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.898189 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:26 crc kubenswrapper[4728]: I1205 11:08:26.898264 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:26Z","lastTransitionTime":"2025-12-05T11:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:27 crc kubenswrapper[4728]: I1205 11:08:27.002143 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:27 crc kubenswrapper[4728]: I1205 11:08:27.002185 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:27 crc kubenswrapper[4728]: I1205 11:08:27.002197 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:27 crc kubenswrapper[4728]: I1205 11:08:27.002216 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:27 crc kubenswrapper[4728]: I1205 11:08:27.002227 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:27Z","lastTransitionTime":"2025-12-05T11:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:27 crc kubenswrapper[4728]: I1205 11:08:27.105438 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:27 crc kubenswrapper[4728]: I1205 11:08:27.105510 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:27 crc kubenswrapper[4728]: I1205 11:08:27.105534 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:27 crc kubenswrapper[4728]: I1205 11:08:27.105564 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:27 crc kubenswrapper[4728]: I1205 11:08:27.105588 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:27Z","lastTransitionTime":"2025-12-05T11:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:27 crc kubenswrapper[4728]: I1205 11:08:27.208737 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:27 crc kubenswrapper[4728]: I1205 11:08:27.208831 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:27 crc kubenswrapper[4728]: I1205 11:08:27.208859 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:27 crc kubenswrapper[4728]: I1205 11:08:27.208890 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:27 crc kubenswrapper[4728]: I1205 11:08:27.208911 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:27Z","lastTransitionTime":"2025-12-05T11:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:27 crc kubenswrapper[4728]: I1205 11:08:27.311437 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:27 crc kubenswrapper[4728]: I1205 11:08:27.311498 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:27 crc kubenswrapper[4728]: I1205 11:08:27.311506 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:27 crc kubenswrapper[4728]: I1205 11:08:27.311520 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:27 crc kubenswrapper[4728]: I1205 11:08:27.311548 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:27Z","lastTransitionTime":"2025-12-05T11:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:27 crc kubenswrapper[4728]: I1205 11:08:27.351510 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:08:27 crc kubenswrapper[4728]: E1205 11:08:27.351676 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 11:08:27 crc kubenswrapper[4728]: I1205 11:08:27.413492 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:27 crc kubenswrapper[4728]: I1205 11:08:27.413550 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:27 crc kubenswrapper[4728]: I1205 11:08:27.413560 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:27 crc kubenswrapper[4728]: I1205 11:08:27.413576 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:27 crc kubenswrapper[4728]: I1205 11:08:27.413589 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:27Z","lastTransitionTime":"2025-12-05T11:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:27 crc kubenswrapper[4728]: I1205 11:08:27.516939 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:27 crc kubenswrapper[4728]: I1205 11:08:27.517001 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:27 crc kubenswrapper[4728]: I1205 11:08:27.517016 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:27 crc kubenswrapper[4728]: I1205 11:08:27.517038 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:27 crc kubenswrapper[4728]: I1205 11:08:27.517052 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:27Z","lastTransitionTime":"2025-12-05T11:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:27 crc kubenswrapper[4728]: I1205 11:08:27.619628 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:27 crc kubenswrapper[4728]: I1205 11:08:27.619697 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:27 crc kubenswrapper[4728]: I1205 11:08:27.619708 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:27 crc kubenswrapper[4728]: I1205 11:08:27.619721 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:27 crc kubenswrapper[4728]: I1205 11:08:27.619731 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:27Z","lastTransitionTime":"2025-12-05T11:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:27 crc kubenswrapper[4728]: I1205 11:08:27.722673 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:27 crc kubenswrapper[4728]: I1205 11:08:27.722754 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:27 crc kubenswrapper[4728]: I1205 11:08:27.722779 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:27 crc kubenswrapper[4728]: I1205 11:08:27.722863 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:27 crc kubenswrapper[4728]: I1205 11:08:27.722888 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:27Z","lastTransitionTime":"2025-12-05T11:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:27 crc kubenswrapper[4728]: I1205 11:08:27.825384 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:27 crc kubenswrapper[4728]: I1205 11:08:27.825425 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:27 crc kubenswrapper[4728]: I1205 11:08:27.825434 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:27 crc kubenswrapper[4728]: I1205 11:08:27.825449 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:27 crc kubenswrapper[4728]: I1205 11:08:27.825460 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:27Z","lastTransitionTime":"2025-12-05T11:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:27 crc kubenswrapper[4728]: I1205 11:08:27.839327 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/99a5c711-5c13-4615-93fc-9fbf02ce54ca-metrics-certs\") pod \"network-metrics-daemon-2dq9w\" (UID: \"99a5c711-5c13-4615-93fc-9fbf02ce54ca\") " pod="openshift-multus/network-metrics-daemon-2dq9w" Dec 05 11:08:27 crc kubenswrapper[4728]: E1205 11:08:27.839465 4728 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 11:08:27 crc kubenswrapper[4728]: E1205 11:08:27.839536 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/99a5c711-5c13-4615-93fc-9fbf02ce54ca-metrics-certs podName:99a5c711-5c13-4615-93fc-9fbf02ce54ca nodeName:}" failed. No retries permitted until 2025-12-05 11:08:35.839514619 +0000 UTC m=+49.981637322 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/99a5c711-5c13-4615-93fc-9fbf02ce54ca-metrics-certs") pod "network-metrics-daemon-2dq9w" (UID: "99a5c711-5c13-4615-93fc-9fbf02ce54ca") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 11:08:27 crc kubenswrapper[4728]: I1205 11:08:27.928670 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:27 crc kubenswrapper[4728]: I1205 11:08:27.928753 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:27 crc kubenswrapper[4728]: I1205 11:08:27.928769 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:27 crc kubenswrapper[4728]: I1205 11:08:27.928812 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:27 crc kubenswrapper[4728]: I1205 11:08:27.928825 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:27Z","lastTransitionTime":"2025-12-05T11:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:28 crc kubenswrapper[4728]: I1205 11:08:28.031485 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:28 crc kubenswrapper[4728]: I1205 11:08:28.031530 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:28 crc kubenswrapper[4728]: I1205 11:08:28.031543 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:28 crc kubenswrapper[4728]: I1205 11:08:28.031558 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:28 crc kubenswrapper[4728]: I1205 11:08:28.031569 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:28Z","lastTransitionTime":"2025-12-05T11:08:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:28 crc kubenswrapper[4728]: I1205 11:08:28.134208 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:28 crc kubenswrapper[4728]: I1205 11:08:28.134289 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:28 crc kubenswrapper[4728]: I1205 11:08:28.134307 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:28 crc kubenswrapper[4728]: I1205 11:08:28.134332 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:28 crc kubenswrapper[4728]: I1205 11:08:28.134354 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:28Z","lastTransitionTime":"2025-12-05T11:08:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:28 crc kubenswrapper[4728]: I1205 11:08:28.237612 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:28 crc kubenswrapper[4728]: I1205 11:08:28.237681 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:28 crc kubenswrapper[4728]: I1205 11:08:28.237715 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:28 crc kubenswrapper[4728]: I1205 11:08:28.237758 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:28 crc kubenswrapper[4728]: I1205 11:08:28.237783 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:28Z","lastTransitionTime":"2025-12-05T11:08:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 05 11:08:28 crc kubenswrapper[4728]: I1205 11:08:28.340879 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:28 crc kubenswrapper[4728]: I1205 11:08:28.340934 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:28 crc kubenswrapper[4728]: I1205 11:08:28.340948 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:28 crc kubenswrapper[4728]: I1205 11:08:28.340971 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:28 crc kubenswrapper[4728]: I1205 11:08:28.340987 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:28Z","lastTransitionTime":"2025-12-05T11:08:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:28 crc kubenswrapper[4728]: I1205 11:08:28.351600 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 11:08:28 crc kubenswrapper[4728]: I1205 11:08:28.351652 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2dq9w"
Dec 05 11:08:28 crc kubenswrapper[4728]: E1205 11:08:28.351740 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 11:08:28 crc kubenswrapper[4728]: I1205 11:08:28.351600 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 11:08:28 crc kubenswrapper[4728]: E1205 11:08:28.351906 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2dq9w" podUID="99a5c711-5c13-4615-93fc-9fbf02ce54ca"
Dec 05 11:08:28 crc kubenswrapper[4728]: E1205 11:08:28.352031 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 11:08:28 crc kubenswrapper[4728]: I1205 11:08:28.444252 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:28 crc kubenswrapper[4728]: I1205 11:08:28.444309 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:28 crc kubenswrapper[4728]: I1205 11:08:28.444329 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:28 crc kubenswrapper[4728]: I1205 11:08:28.444439 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:28 crc kubenswrapper[4728]: I1205 11:08:28.444466 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:28Z","lastTransitionTime":"2025-12-05T11:08:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:28 crc kubenswrapper[4728]: I1205 11:08:28.546428 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:28 crc kubenswrapper[4728]: I1205 11:08:28.546465 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:28 crc kubenswrapper[4728]: I1205 11:08:28.546475 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:28 crc kubenswrapper[4728]: I1205 11:08:28.546492 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:28 crc kubenswrapper[4728]: I1205 11:08:28.546504 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:28Z","lastTransitionTime":"2025-12-05T11:08:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:28 crc kubenswrapper[4728]: I1205 11:08:28.649035 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:28 crc kubenswrapper[4728]: I1205 11:08:28.649096 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:28 crc kubenswrapper[4728]: I1205 11:08:28.649107 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:28 crc kubenswrapper[4728]: I1205 11:08:28.649123 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:28 crc kubenswrapper[4728]: I1205 11:08:28.649133 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:28Z","lastTransitionTime":"2025-12-05T11:08:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:28 crc kubenswrapper[4728]: I1205 11:08:28.751835 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:28 crc kubenswrapper[4728]: I1205 11:08:28.751871 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:28 crc kubenswrapper[4728]: I1205 11:08:28.751900 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:28 crc kubenswrapper[4728]: I1205 11:08:28.751915 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:28 crc kubenswrapper[4728]: I1205 11:08:28.751925 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:28Z","lastTransitionTime":"2025-12-05T11:08:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:28 crc kubenswrapper[4728]: I1205 11:08:28.854141 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:28 crc kubenswrapper[4728]: I1205 11:08:28.854186 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:28 crc kubenswrapper[4728]: I1205 11:08:28.854198 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:28 crc kubenswrapper[4728]: I1205 11:08:28.854215 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:28 crc kubenswrapper[4728]: I1205 11:08:28.854225 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:28Z","lastTransitionTime":"2025-12-05T11:08:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:28 crc kubenswrapper[4728]: I1205 11:08:28.956683 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:28 crc kubenswrapper[4728]: I1205 11:08:28.956716 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:28 crc kubenswrapper[4728]: I1205 11:08:28.956732 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:28 crc kubenswrapper[4728]: I1205 11:08:28.956753 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:28 crc kubenswrapper[4728]: I1205 11:08:28.956763 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:28Z","lastTransitionTime":"2025-12-05T11:08:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.059301 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.059363 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.059374 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.059390 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.059401 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:29Z","lastTransitionTime":"2025-12-05T11:08:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.161885 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.161928 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.161940 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.161955 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.161967 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:29Z","lastTransitionTime":"2025-12-05T11:08:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.264763 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.264854 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.264871 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.264891 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.264906 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:29Z","lastTransitionTime":"2025-12-05T11:08:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.351960 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 11:08:29 crc kubenswrapper[4728]: E1205 11:08:29.352129 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.366830 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.366865 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.366875 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.366890 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.366900 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:29Z","lastTransitionTime":"2025-12-05T11:08:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.469611 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.469674 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.469690 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.469715 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.469731 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:29Z","lastTransitionTime":"2025-12-05T11:08:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.576850 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.576908 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.576918 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.576931 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.576940 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:29Z","lastTransitionTime":"2025-12-05T11:08:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.678986 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.679031 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.679042 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.679058 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.679069 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:29Z","lastTransitionTime":"2025-12-05T11:08:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.781726 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.781785 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.781815 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.781834 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.781845 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:29Z","lastTransitionTime":"2025-12-05T11:08:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.806745 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.806814 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.806824 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.806838 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.806850 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:29Z","lastTransitionTime":"2025-12-05T11:08:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
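The three long "Error updating node status, will retry" records that follow all fail for the reason buried at their tail: the node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 presents a serving certificate that expired on 2025-08-24T17:21:41Z, while the node clock reads 2025-12-05. A small probe, assuming the endpoint is reachable from the node, that fetches the presented certificate and prints its validity window:

package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	// Verification is skipped on purpose: the point is to inspect the
	// expired certificate, not to trust it.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		fmt.Println("dial failed:", err)
		return
	}
	defer conn.Close()

	cert := conn.ConnectionState().PeerCertificates[0]
	fmt.Printf("subject:    %s\n", cert.Subject)
	fmt.Printf("not before: %s\n", cert.NotBefore.Format(time.RFC3339))
	fmt.Printf("not after:  %s\n", cert.NotAfter.Format(time.RFC3339))
	if time.Now().After(cert.NotAfter) {
		fmt.Println("certificate has expired; current time is after not-after")
	}
}

Until that certificate is rotated (or the node clock corrected), every status patch will be rejected the same way, which is why the record repeats below with only the timestamp changing.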
Dec 05 11:08:29 crc kubenswrapper[4728]: E1205 11:08:29.822853 4728 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"feb38e4d-326c-4c7a-a272-95e0ac54f009\\\",\\\"systemUUID\\\":\\\"65b68dc7-92a1-4fa1-bbc7-423a936860c6\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:29Z is after 
2025-08-24T17:21:41Z"
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.827659 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.827719 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.827736 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.827762 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.827776 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:29Z","lastTransitionTime":"2025-12-05T11:08:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:29 crc kubenswrapper[4728]: E1205 11:08:29.841981 4728 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"feb38e4d-326c-4c7a-a272-95e0ac54f009\\\",\\\"systemUUID\\\":\\\"65b68dc7-92a1-4fa1-bbc7-423a936860c6\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:29Z is after 
2025-08-24T17:21:41Z"
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.847163 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.847201 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.847210 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.847225 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.847235 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:29Z","lastTransitionTime":"2025-12-05T11:08:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:29 crc kubenswrapper[4728]: E1205 11:08:29.861488 4728 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"feb38e4d-326c-4c7a-a272-95e0ac54f009\\\",\\\"systemUUID\\\":\\\"65b68dc7-92a1-4fa1-bbc7-423a936860c6\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:29Z is after 
2025-08-24T17:21:41Z"
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.865348 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf"
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.866023 4728 scope.go:117] "RemoveContainer" containerID="2a80880d77c415df6c5a75840e5c7a8bc26c41913ea49d22fef48ecb50beda8e"
Dec 05 11:08:29 crc kubenswrapper[4728]: E1205 11:08:29.866162 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-wchlf_openshift-ovn-kubernetes(1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5)\"" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" podUID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5"
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.866980 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.867017 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.867045 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.867058 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.867067 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:29Z","lastTransitionTime":"2025-12-05T11:08:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
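
The records above capture the central failure in this window: every node and pod status PATCH is rejected because the network-node-identity webhook at https://127.0.0.1:9743 serves a certificate that expired on 2025-08-24T17:21:41Z, while the node clock reads 2025-12-05T11:08:29Z. Below is a minimal Go sketch of the same validity test crypto/x509 applies during the TLS handshake; the certificate path is a hypothetical placeholder, not something taken from this log.

// certcheck.go - sketch of the NotBefore/NotAfter check whose failure the
// kubelet reports as "x509: certificate has expired or is not yet valid".
package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
	"time"
)

func main() {
	// Hypothetical path; the webhook's real serving cert lives elsewhere.
	data, err := os.ReadFile("/tmp/webhook-serving.crt")
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	block, _ := pem.Decode(data)
	if block == nil {
		fmt.Fprintln(os.Stderr, "no PEM block found")
		os.Exit(1)
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	now := time.Now()
	// Same comparison the handshake surfaces as "current time ... is after ...".
	switch {
	case now.After(cert.NotAfter):
		fmt.Printf("expired: current time %s is after %s\n",
			now.UTC().Format(time.RFC3339), cert.NotAfter.UTC().Format(time.RFC3339))
	case now.Before(cert.NotBefore):
		fmt.Printf("not yet valid until %s\n", cert.NotBefore.UTC().Format(time.RFC3339))
	default:
		fmt.Printf("valid until %s\n", cert.NotAfter.UTC().Format(time.RFC3339))
	}
}

Until the webhook's serving certificate is rotated (or the clock skew corrected), the kubelet keeps retrying these patches, which is why the same payload recurs below and eventually ends in "update node status exceeds retry count".
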
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.880345 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-n2qgk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f26e1600-9bcd-497a-b875-7eaed5b6fba8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b47033c6c217ce37f789d031e18e21eeb86ba85f574c6455e15c79a25bfeaecc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fqbq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6b426a3d72e8498cd10b4085d89ffa51beaa52d2dbda7c70c96769e32a20d27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fqbq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-n2qgk\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:29Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:29 crc kubenswrapper[4728]: E1205 11:08:29.880401 4728 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"feb38e4d-326c-4c7a-a272-95e0ac54f009\\\",\\\"systemUUID\\\":\\\"65b68dc7-92a1-4fa1-bbc7-423a936860c6\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:29Z is after 
2025-08-24T17:21:41Z"
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.885840 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.885884 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.885895 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.885911 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.885925 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:29Z","lastTransitionTime":"2025-12-05T11:08:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.894178 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4abe5ae7-e1e1-4771-bb01-4e62b10e074b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe411cb87a46bdaf8208ebd4162ff18f2f9a2617ca43eb635793364573eb3ea4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ca6321022609b3cd60a9e1a534cfd3e69d3f520e5252ed4ffa8f76be4af91e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-c
ontroller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ccc55c28cdfadaa191738047a8d7223cf102d79a9af4d225562a87c42cf1c8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1665078c543b335af45e2ef37840dff719d513125a747f8c07dc74ef97e07354\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:29Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:29 crc kubenswrapper[4728]: E1205 11:08:29.897535 4728 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:29Z\\\",\\\"message\\\":\\\"kubelet 
has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay
.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406ee
c4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\
\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"feb38e4d-326c-4c7a-a272-95e0ac54f009\\\",\\\"systemUUID\\\":\\\"65b68dc7-92a1-4fa1-bbc7-423a936860c6\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:29Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:29 crc kubenswrapper[4728]: E1205 11:08:29.897644 4728 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.899130 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.899185 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.899198 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.899214 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.899225 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:29Z","lastTransitionTime":"2025-12-05T11:08:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.910306 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6b1dbb0-8a99-4b3b-870e-771cdaac1bac\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://adb81cae56872a55c5781abe457653330ce2284251d64366ece5f2f05055b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ab41915fb3de0296bc2ba49fe39835620c1ed216790add23ccc2c23ac718c2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://920da764fdc49312fdf906f31a1280028d5762b4b7af2b3806c9488e3dfa9d71\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd145f7ac78fc008f9b9b24a11f51d6afc11e3cfa047b8d178d11507ddde8e77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b05c55e24f53fc91a2fd2b2a34b000751799933c3cb56da1ea666a6bfb6ba13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T11:08:05Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 11:07:59.827917 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 11:07:59.829563 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2883091425/tls.crt::/tmp/serving-cert-2883091425/tls.key\\\\\\\"\\\\nI1205 11:08:05.319025 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 11:08:05.321746 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 11:08:05.321774 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 11:08:05.321847 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 11:08:05.321860 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 11:08:05.327281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 11:08:05.327310 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 11:08:05.327326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 11:08:05.327329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 11:08:05.327332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 11:08:05.327493 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 11:08:05.328771 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af8363ba45c19a07e7d11064c2e321ec28b939288d01eb82d8bd9e1ee6b93998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:29Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.924349 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://098ed82d50e963f31d1176e06249004a4cc94258aab5dabf6628dead209ae3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:29Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.937811 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gf8np" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f292da29-a632-47aa-8bcc-2d999eaa6c11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d525f104a98170107ecc1ae96c4f5c5ff3dda2f976336c219e5a9498725380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dxmch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gf8np\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:29Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.950599 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bfa60b-fcb6-4519-abc5-c25fea50921d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d952894ef1ae2b5d6c23b0d1b85d9d2689e062674a2e4ba53719b6c7290bbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41cccfccd3b8763566f9a4453832461a74aedcf7bcc18e8c925bb20e1b620de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-w8qlp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:29Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.966290 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8pwbb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e18c7d32-4ecb-4931-931e-56a7898cb233\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d53da0e296e545d13941882c9568c12f967a274b906dafb14c0905f558a155c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\
",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f79805a42a1054302447bc55dc07df80cf19e651cd1c3e783a4614ff49f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"
containerID\\\":\\\"cri-o://f79805a42a1054302447bc55dc07df80cf19e651cd1c3e783a4614ff49f9f624\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b953abf0e1905c143f473ea9d09341d7602168ff1350c317346bef8dda9402f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b953abf0e1905c143f473ea9d09341d7602168ff1350c317346bef8dda9402f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7627aff65d1bc4a39148f098ac2e1676d41c980ad3dd2d9494f4bee7b4c0872\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7627aff65d1bc4a39148f098ac2e1676d41c980ad3dd2d9494f4bee7b4c0872\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8pwbb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:29Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.976646 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zpkw4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ed53a3-7ee5-4d66-9e47-be49a9cd1b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71f9a74961573b2aafe3dce854836f51b027ee6abfb12bc2ff57be6524979165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqqjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zpkw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:29Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.987285 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2dq9w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"99a5c711-5c13-4615-93fc-9fbf02ce54ca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rq68g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rq68g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:20Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2dq9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:29Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:29 crc kubenswrapper[4728]: I1205 11:08:29.996341 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-85f5z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f536e7a4-ad53-442e-b7c3-8928fcd89f22\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://180e166e57042a6843516cf424020e8aab91131f7e0cc4c6f061de5e46bb0f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r72s7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-85f5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:29Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.001047 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.001071 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.001080 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.001092 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.001102 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:30Z","lastTransitionTime":"2025-12-05T11:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.006232 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:30Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.016468 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec1c7fc1e4ad2be5a7dfb1eeaa3def2a68d041e302643e875869f6c657c5f6d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:30Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.032191 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62ccfb6861d12cec3082bfe61fc1976dc0889398539797aa62979b632e74c9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82bd221438311b8ba34cfc26e43e590743b257ad91407befb8f4d2e17f7ba55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aacb2f8e3ec748166a587a5c2c7c026382073d9e3b8e20caf4dc444e9cc70c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19041ddafe3e2fe2bb95577c9bc5e372ba443790469deb4f36634d3690a61b0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d488533307095d5c0efc56c8f32bac388226d2d5763c5c31db03c84585899f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02bc0b9bc6ed08d2162af49b31f5e66ef70f78723e4016f6be5905d1227201b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a80880d77c415df6c5a75840e5c7a8bc26c4191
3ea49d22fef48ecb50beda8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a80880d77c415df6c5a75840e5c7a8bc26c41913ea49d22fef48ecb50beda8e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T11:08:21Z\\\",\\\"message\\\":\\\"[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {43933d5e-3c3b-4ff8-8926-04ac25de450e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:43933d5e-3c3b-4ff8-8926-04ac25de450e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 11:08:20.634876 6208 obj_retry.go:365] Adding new object: *v1.Pod openshift-image-registry/node-ca-zpkw4\\\\nF1205 11:08:20.635951 6208 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:19Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-wchlf_openshift-ovn-kubernetes(1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9be8133cc8e8e518cd8a6a3ace93814e8d565d8bea2b81433ba07281ade2f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wchlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:30Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.050124 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"600a7698-d6af-4d3a-997c-2af7492b676c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4e303bada68d0a42afcecbe481dd6199dd2e545598d65614cee8ce1097fa3da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e0e4eacda19e748d0d3f8e1
d76b38c186c2df0f81a171387de0d9016a76c719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c6351e5a4a9441b43ba3145aa818599799ad7fcdd6bf62b5d0ec08eb3a9d5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c60e02416d658dfb8deecf915952febb04b650126edae08c8bf522c50524c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://489eff9110c6e48b0e3119edf09adf0a2a48fc1fdc1bb8e81f113ab882725eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"image\\\":\\\"
quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:30Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.060390 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90beec388d3137d02789bb6be800fbb0b1678f8799f133d1e48b1f636f70e8c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://780e71361f1592490b1bbc77b9a869204d3a20319d1bd6c8330d3b0fc81d197f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:30Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.072475 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:30Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.083349 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:30Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.102876 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.102923 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.102933 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.102951 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.102963 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:30Z","lastTransitionTime":"2025-12-05T11:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
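
  --- annotation ---
  The repeated "failed calling webhook" entries above share one root cause: the kubelet's
  status patches are intercepted by the pod.network-node-identity.openshift.io webhook on
  https://127.0.0.1:9743, whose serving certificate expired 2025-08-24T17:21:41Z, months
  before the node clock (2025-12-05). Below is a minimal Go sketch of the validity-window
  check that yields "certificate has expired or is not yet valid"; the certificate path is
  an assumption based on the "/etc/webhook-cert/" volumeMount shown in the webhook
  container's status above, and "tls.crt" is a hypothetical file name.

  // checkcert.go - sketch of the NotBefore/NotAfter check crypto/tls performs
  // during verification. Path and file name are assumptions (the log only
  // shows the mount "/etc/webhook-cert/").
  package main

  import (
          "crypto/x509"
          "encoding/pem"
          "fmt"
          "log"
          "os"
          "time"
  )

  func main() {
          pemBytes, err := os.ReadFile("/etc/webhook-cert/tls.crt") // assumed file name
          if err != nil {
                  log.Fatal(err)
          }
          block, _ := pem.Decode(pemBytes)
          if block == nil {
                  log.Fatal("no PEM block found")
          }
          cert, err := x509.ParseCertificate(block.Bytes)
          if err != nil {
                  log.Fatal(err)
          }
          now := time.Now().UTC()
          // Mirrors the error reported in the log: the current time must fall
          // inside [NotBefore, NotAfter] or verification fails.
          if now.Before(cert.NotBefore) || now.After(cert.NotAfter) {
                  fmt.Printf("certificate invalid: current time %s is outside [%s, %s]\n",
                          now.Format(time.RFC3339),
                          cert.NotBefore.Format(time.RFC3339),
                          cert.NotAfter.Format(time.RFC3339))
                  return
          }
          fmt.Println("certificate is within its validity window")
  }
  --- end annotation ---
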
Has your network provider started?"} Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.205532 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.205599 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.205617 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.205642 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.205661 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:30Z","lastTransitionTime":"2025-12-05T11:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.309138 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.309396 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.309528 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.309615 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.309697 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:30Z","lastTransitionTime":"2025-12-05T11:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.351823 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2dq9w" Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.351834 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:08:30 crc kubenswrapper[4728]: E1205 11:08:30.351969 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2dq9w" podUID="99a5c711-5c13-4615-93fc-9fbf02ce54ca" Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.352042 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:08:30 crc kubenswrapper[4728]: E1205 11:08:30.352131 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 11:08:30 crc kubenswrapper[4728]: E1205 11:08:30.352192 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.411959 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.412009 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.412019 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.412036 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.412047 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:30Z","lastTransitionTime":"2025-12-05T11:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.515034 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.515090 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.515106 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.515131 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.515147 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:30Z","lastTransitionTime":"2025-12-05T11:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.618686 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.618739 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.618756 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.618781 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.618831 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:30Z","lastTransitionTime":"2025-12-05T11:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.722980 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.723046 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.723068 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.723097 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.723119 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:30Z","lastTransitionTime":"2025-12-05T11:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.826590 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.826666 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.826684 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.826724 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.826764 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:30Z","lastTransitionTime":"2025-12-05T11:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.930101 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.930151 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.930163 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.930183 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:30 crc kubenswrapper[4728]: I1205 11:08:30.930199 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:30Z","lastTransitionTime":"2025-12-05T11:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:31 crc kubenswrapper[4728]: I1205 11:08:31.032914 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:31 crc kubenswrapper[4728]: I1205 11:08:31.032952 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:31 crc kubenswrapper[4728]: I1205 11:08:31.032974 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:31 crc kubenswrapper[4728]: I1205 11:08:31.032989 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:31 crc kubenswrapper[4728]: I1205 11:08:31.032999 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:31Z","lastTransitionTime":"2025-12-05T11:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:31 crc kubenswrapper[4728]: I1205 11:08:31.136477 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:31 crc kubenswrapper[4728]: I1205 11:08:31.136535 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:31 crc kubenswrapper[4728]: I1205 11:08:31.136557 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:31 crc kubenswrapper[4728]: I1205 11:08:31.136585 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:31 crc kubenswrapper[4728]: I1205 11:08:31.136608 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:31Z","lastTransitionTime":"2025-12-05T11:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
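
  --- annotation ---
  Each "Node became not ready" entry from setters.go prints a corev1 NodeCondition
  literal. A small sketch, using a local stand-in struct rather than the real
  k8s.io/api/core/v1 type, showing how the condition JSON from these lines decodes
  (message abbreviated):

  // condition.go - decodes the NodeCondition literal printed above.
  package main

  import (
          "encoding/json"
          "fmt"
          "log"
          "time"
  )

  // nodeCondition is a local stand-in for corev1.NodeCondition.
  type nodeCondition struct {
          Type               string    `json:"type"`
          Status             string    `json:"status"`
          LastHeartbeatTime  time.Time `json:"lastHeartbeatTime"`
          LastTransitionTime time.Time `json:"lastTransitionTime"`
          Reason             string    `json:"reason"`
          Message            string    `json:"message"`
  }

  func main() {
          // Copied from the log entries above; message abbreviated.
          raw := `{"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:31Z","lastTransitionTime":"2025-12-05T11:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready"}`
          var c nodeCondition
          if err := json.Unmarshal([]byte(raw), &c); err != nil {
                  log.Fatal(err)
          }
          fmt.Printf("node Ready=%s since %s (reason %s)\n",
                  c.Status, c.LastTransitionTime.Format(time.RFC3339), c.Reason)
  }
  --- end annotation ---
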
Has your network provider started?"} Dec 05 11:08:31 crc kubenswrapper[4728]: I1205 11:08:31.239665 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:31 crc kubenswrapper[4728]: I1205 11:08:31.239735 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:31 crc kubenswrapper[4728]: I1205 11:08:31.239760 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:31 crc kubenswrapper[4728]: I1205 11:08:31.239786 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:31 crc kubenswrapper[4728]: I1205 11:08:31.239843 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:31Z","lastTransitionTime":"2025-12-05T11:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:31 crc kubenswrapper[4728]: I1205 11:08:31.343050 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:31 crc kubenswrapper[4728]: I1205 11:08:31.343122 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:31 crc kubenswrapper[4728]: I1205 11:08:31.343139 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:31 crc kubenswrapper[4728]: I1205 11:08:31.343619 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:31 crc kubenswrapper[4728]: I1205 11:08:31.343690 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:31Z","lastTransitionTime":"2025-12-05T11:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:31 crc kubenswrapper[4728]: I1205 11:08:31.351656 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:08:31 crc kubenswrapper[4728]: E1205 11:08:31.351853 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 11:08:31 crc kubenswrapper[4728]: I1205 11:08:31.448120 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:31 crc kubenswrapper[4728]: I1205 11:08:31.448192 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:31 crc kubenswrapper[4728]: I1205 11:08:31.448226 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:31 crc kubenswrapper[4728]: I1205 11:08:31.448256 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:31 crc kubenswrapper[4728]: I1205 11:08:31.448276 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:31Z","lastTransitionTime":"2025-12-05T11:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:31 crc kubenswrapper[4728]: I1205 11:08:31.551508 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:31 crc kubenswrapper[4728]: I1205 11:08:31.551554 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:31 crc kubenswrapper[4728]: I1205 11:08:31.551568 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:31 crc kubenswrapper[4728]: I1205 11:08:31.551625 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:31 crc kubenswrapper[4728]: I1205 11:08:31.551647 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:31Z","lastTransitionTime":"2025-12-05T11:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:31 crc kubenswrapper[4728]: I1205 11:08:31.653465 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:31 crc kubenswrapper[4728]: I1205 11:08:31.653548 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:31 crc kubenswrapper[4728]: I1205 11:08:31.653572 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:31 crc kubenswrapper[4728]: I1205 11:08:31.653601 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:31 crc kubenswrapper[4728]: I1205 11:08:31.653624 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:31Z","lastTransitionTime":"2025-12-05T11:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:31 crc kubenswrapper[4728]: I1205 11:08:31.756542 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:31 crc kubenswrapper[4728]: I1205 11:08:31.756588 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:31 crc kubenswrapper[4728]: I1205 11:08:31.756647 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:31 crc kubenswrapper[4728]: I1205 11:08:31.756667 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:31 crc kubenswrapper[4728]: I1205 11:08:31.756678 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:31Z","lastTransitionTime":"2025-12-05T11:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:31 crc kubenswrapper[4728]: I1205 11:08:31.859229 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:31 crc kubenswrapper[4728]: I1205 11:08:31.859265 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:31 crc kubenswrapper[4728]: I1205 11:08:31.859278 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:31 crc kubenswrapper[4728]: I1205 11:08:31.859294 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:31 crc kubenswrapper[4728]: I1205 11:08:31.859305 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:31Z","lastTransitionTime":"2025-12-05T11:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:31 crc kubenswrapper[4728]: I1205 11:08:31.962558 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:31 crc kubenswrapper[4728]: I1205 11:08:31.962633 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:31 crc kubenswrapper[4728]: I1205 11:08:31.962651 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:31 crc kubenswrapper[4728]: I1205 11:08:31.962681 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:31 crc kubenswrapper[4728]: I1205 11:08:31.962709 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:31Z","lastTransitionTime":"2025-12-05T11:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:32 crc kubenswrapper[4728]: I1205 11:08:32.066004 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:32 crc kubenswrapper[4728]: I1205 11:08:32.066157 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:32 crc kubenswrapper[4728]: I1205 11:08:32.066177 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:32 crc kubenswrapper[4728]: I1205 11:08:32.066213 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:32 crc kubenswrapper[4728]: I1205 11:08:32.066231 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:32Z","lastTransitionTime":"2025-12-05T11:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:32 crc kubenswrapper[4728]: I1205 11:08:32.169904 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:32 crc kubenswrapper[4728]: I1205 11:08:32.169943 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:32 crc kubenswrapper[4728]: I1205 11:08:32.169954 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:32 crc kubenswrapper[4728]: I1205 11:08:32.169971 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:32 crc kubenswrapper[4728]: I1205 11:08:32.169980 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:32Z","lastTransitionTime":"2025-12-05T11:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:32 crc kubenswrapper[4728]: I1205 11:08:32.273160 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:32 crc kubenswrapper[4728]: I1205 11:08:32.273233 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:32 crc kubenswrapper[4728]: I1205 11:08:32.273256 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:32 crc kubenswrapper[4728]: I1205 11:08:32.273287 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:32 crc kubenswrapper[4728]: I1205 11:08:32.273311 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:32Z","lastTransitionTime":"2025-12-05T11:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:32 crc kubenswrapper[4728]: I1205 11:08:32.351455 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2dq9w" Dec 05 11:08:32 crc kubenswrapper[4728]: I1205 11:08:32.351509 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:08:32 crc kubenswrapper[4728]: E1205 11:08:32.351612 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2dq9w" podUID="99a5c711-5c13-4615-93fc-9fbf02ce54ca" Dec 05 11:08:32 crc kubenswrapper[4728]: E1205 11:08:32.351711 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 11:08:32 crc kubenswrapper[4728]: I1205 11:08:32.351956 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:08:32 crc kubenswrapper[4728]: E1205 11:08:32.352243 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 11:08:32 crc kubenswrapper[4728]: I1205 11:08:32.376421 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:32 crc kubenswrapper[4728]: I1205 11:08:32.376465 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:32 crc kubenswrapper[4728]: I1205 11:08:32.376475 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:32 crc kubenswrapper[4728]: I1205 11:08:32.376490 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:32 crc kubenswrapper[4728]: I1205 11:08:32.376500 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:32Z","lastTransitionTime":"2025-12-05T11:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:32 crc kubenswrapper[4728]: I1205 11:08:32.479861 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:32 crc kubenswrapper[4728]: I1205 11:08:32.479940 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:32 crc kubenswrapper[4728]: I1205 11:08:32.479961 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:32 crc kubenswrapper[4728]: I1205 11:08:32.479987 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:32 crc kubenswrapper[4728]: I1205 11:08:32.480003 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:32Z","lastTransitionTime":"2025-12-05T11:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:32 crc kubenswrapper[4728]: I1205 11:08:32.583057 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:32 crc kubenswrapper[4728]: I1205 11:08:32.583135 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:32 crc kubenswrapper[4728]: I1205 11:08:32.583156 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:32 crc kubenswrapper[4728]: I1205 11:08:32.583179 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:32 crc kubenswrapper[4728]: I1205 11:08:32.583196 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:32Z","lastTransitionTime":"2025-12-05T11:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:32 crc kubenswrapper[4728]: I1205 11:08:32.686193 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:32 crc kubenswrapper[4728]: I1205 11:08:32.686258 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:32 crc kubenswrapper[4728]: I1205 11:08:32.686275 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:32 crc kubenswrapper[4728]: I1205 11:08:32.686302 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:32 crc kubenswrapper[4728]: I1205 11:08:32.686320 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:32Z","lastTransitionTime":"2025-12-05T11:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:32 crc kubenswrapper[4728]: I1205 11:08:32.789284 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:32 crc kubenswrapper[4728]: I1205 11:08:32.789332 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:32 crc kubenswrapper[4728]: I1205 11:08:32.789348 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:32 crc kubenswrapper[4728]: I1205 11:08:32.789369 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:32 crc kubenswrapper[4728]: I1205 11:08:32.789386 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:32Z","lastTransitionTime":"2025-12-05T11:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:32 crc kubenswrapper[4728]: I1205 11:08:32.891743 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:32 crc kubenswrapper[4728]: I1205 11:08:32.891786 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:32 crc kubenswrapper[4728]: I1205 11:08:32.891819 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:32 crc kubenswrapper[4728]: I1205 11:08:32.891834 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:32 crc kubenswrapper[4728]: I1205 11:08:32.891845 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:32Z","lastTransitionTime":"2025-12-05T11:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:32 crc kubenswrapper[4728]: I1205 11:08:32.994681 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:32 crc kubenswrapper[4728]: I1205 11:08:32.994748 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:32 crc kubenswrapper[4728]: I1205 11:08:32.994766 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:32 crc kubenswrapper[4728]: I1205 11:08:32.994833 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:32 crc kubenswrapper[4728]: I1205 11:08:32.994858 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:32Z","lastTransitionTime":"2025-12-05T11:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:33 crc kubenswrapper[4728]: I1205 11:08:33.097592 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:33 crc kubenswrapper[4728]: I1205 11:08:33.097670 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:33 crc kubenswrapper[4728]: I1205 11:08:33.097692 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:33 crc kubenswrapper[4728]: I1205 11:08:33.097725 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:33 crc kubenswrapper[4728]: I1205 11:08:33.097747 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:33Z","lastTransitionTime":"2025-12-05T11:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:33 crc kubenswrapper[4728]: I1205 11:08:33.200541 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:33 crc kubenswrapper[4728]: I1205 11:08:33.200600 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:33 crc kubenswrapper[4728]: I1205 11:08:33.200616 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:33 crc kubenswrapper[4728]: I1205 11:08:33.200640 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:33 crc kubenswrapper[4728]: I1205 11:08:33.200658 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:33Z","lastTransitionTime":"2025-12-05T11:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:33 crc kubenswrapper[4728]: I1205 11:08:33.302960 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:33 crc kubenswrapper[4728]: I1205 11:08:33.303012 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:33 crc kubenswrapper[4728]: I1205 11:08:33.303024 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:33 crc kubenswrapper[4728]: I1205 11:08:33.303041 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:33 crc kubenswrapper[4728]: I1205 11:08:33.303055 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:33Z","lastTransitionTime":"2025-12-05T11:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:33 crc kubenswrapper[4728]: I1205 11:08:33.351469 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:08:33 crc kubenswrapper[4728]: E1205 11:08:33.351670 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 11:08:33 crc kubenswrapper[4728]: I1205 11:08:33.406225 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:33 crc kubenswrapper[4728]: I1205 11:08:33.406294 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:33 crc kubenswrapper[4728]: I1205 11:08:33.406306 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:33 crc kubenswrapper[4728]: I1205 11:08:33.406321 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:33 crc kubenswrapper[4728]: I1205 11:08:33.406354 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:33Z","lastTransitionTime":"2025-12-05T11:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:33 crc kubenswrapper[4728]: I1205 11:08:33.508843 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:33 crc kubenswrapper[4728]: I1205 11:08:33.508946 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:33 crc kubenswrapper[4728]: I1205 11:08:33.508969 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:33 crc kubenswrapper[4728]: I1205 11:08:33.508999 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:33 crc kubenswrapper[4728]: I1205 11:08:33.509023 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:33Z","lastTransitionTime":"2025-12-05T11:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 05 11:08:33 crc kubenswrapper[4728]: I1205 11:08:33.612194 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:33 crc kubenswrapper[4728]: I1205 11:08:33.612243 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:33 crc kubenswrapper[4728]: I1205 11:08:33.612259 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:33 crc kubenswrapper[4728]: I1205 11:08:33.612286 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:33 crc kubenswrapper[4728]: I1205 11:08:33.612303 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:33Z","lastTransitionTime":"2025-12-05T11:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:33 crc kubenswrapper[4728]: I1205 11:08:33.714928 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:33 crc kubenswrapper[4728]: I1205 11:08:33.714991 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:33 crc kubenswrapper[4728]: I1205 11:08:33.715010 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:33 crc kubenswrapper[4728]: I1205 11:08:33.715034 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:33 crc kubenswrapper[4728]: I1205 11:08:33.715053 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:33Z","lastTransitionTime":"2025-12-05T11:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:33 crc kubenswrapper[4728]: I1205 11:08:33.817446 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:33 crc kubenswrapper[4728]: I1205 11:08:33.817500 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:33 crc kubenswrapper[4728]: I1205 11:08:33.817521 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:33 crc kubenswrapper[4728]: I1205 11:08:33.817555 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:33 crc kubenswrapper[4728]: I1205 11:08:33.817578 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:33Z","lastTransitionTime":"2025-12-05T11:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:33 crc kubenswrapper[4728]: I1205 11:08:33.920138 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:33 crc kubenswrapper[4728]: I1205 11:08:33.920202 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:33 crc kubenswrapper[4728]: I1205 11:08:33.920243 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:33 crc kubenswrapper[4728]: I1205 11:08:33.920278 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:33 crc kubenswrapper[4728]: I1205 11:08:33.920302 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:33Z","lastTransitionTime":"2025-12-05T11:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:34 crc kubenswrapper[4728]: I1205 11:08:34.023847 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:34 crc kubenswrapper[4728]: I1205 11:08:34.023896 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:34 crc kubenswrapper[4728]: I1205 11:08:34.023917 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:34 crc kubenswrapper[4728]: I1205 11:08:34.023952 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:34 crc kubenswrapper[4728]: I1205 11:08:34.023986 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:34Z","lastTransitionTime":"2025-12-05T11:08:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:34 crc kubenswrapper[4728]: I1205 11:08:34.126594 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:34 crc kubenswrapper[4728]: I1205 11:08:34.126640 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:34 crc kubenswrapper[4728]: I1205 11:08:34.126656 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:34 crc kubenswrapper[4728]: I1205 11:08:34.126678 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:34 crc kubenswrapper[4728]: I1205 11:08:34.126694 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:34Z","lastTransitionTime":"2025-12-05T11:08:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:34 crc kubenswrapper[4728]: I1205 11:08:34.228641 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:34 crc kubenswrapper[4728]: I1205 11:08:34.228690 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:34 crc kubenswrapper[4728]: I1205 11:08:34.228707 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:34 crc kubenswrapper[4728]: I1205 11:08:34.228730 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:34 crc kubenswrapper[4728]: I1205 11:08:34.228747 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:34Z","lastTransitionTime":"2025-12-05T11:08:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:34 crc kubenswrapper[4728]: I1205 11:08:34.331770 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:34 crc kubenswrapper[4728]: I1205 11:08:34.331854 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:34 crc kubenswrapper[4728]: I1205 11:08:34.331876 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:34 crc kubenswrapper[4728]: I1205 11:08:34.331904 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:34 crc kubenswrapper[4728]: I1205 11:08:34.331925 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:34Z","lastTransitionTime":"2025-12-05T11:08:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:34 crc kubenswrapper[4728]: I1205 11:08:34.351755 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 11:08:34 crc kubenswrapper[4728]: I1205 11:08:34.351880 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 11:08:34 crc kubenswrapper[4728]: I1205 11:08:34.351996 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2dq9w"
Dec 05 11:08:34 crc kubenswrapper[4728]: E1205 11:08:34.351997 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 11:08:34 crc kubenswrapper[4728]: E1205 11:08:34.352193 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 11:08:34 crc kubenswrapper[4728]: E1205 11:08:34.352365 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2dq9w" podUID="99a5c711-5c13-4615-93fc-9fbf02ce54ca"
Dec 05 11:08:34 crc kubenswrapper[4728]: I1205 11:08:34.435331 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:34 crc kubenswrapper[4728]: I1205 11:08:34.435404 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:34 crc kubenswrapper[4728]: I1205 11:08:34.435428 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:34 crc kubenswrapper[4728]: I1205 11:08:34.435458 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:34 crc kubenswrapper[4728]: I1205 11:08:34.435478 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:34Z","lastTransitionTime":"2025-12-05T11:08:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:34 crc kubenswrapper[4728]: I1205 11:08:34.538510 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:34 crc kubenswrapper[4728]: I1205 11:08:34.538574 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:34 crc kubenswrapper[4728]: I1205 11:08:34.538596 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:34 crc kubenswrapper[4728]: I1205 11:08:34.538624 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:34 crc kubenswrapper[4728]: I1205 11:08:34.538647 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:34Z","lastTransitionTime":"2025-12-05T11:08:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:34 crc kubenswrapper[4728]: I1205 11:08:34.640985 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:34 crc kubenswrapper[4728]: I1205 11:08:34.641060 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:34 crc kubenswrapper[4728]: I1205 11:08:34.641082 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:34 crc kubenswrapper[4728]: I1205 11:08:34.641109 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:34 crc kubenswrapper[4728]: I1205 11:08:34.641129 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:34Z","lastTransitionTime":"2025-12-05T11:08:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:34 crc kubenswrapper[4728]: I1205 11:08:34.743700 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:34 crc kubenswrapper[4728]: I1205 11:08:34.743761 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:34 crc kubenswrapper[4728]: I1205 11:08:34.743784 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:34 crc kubenswrapper[4728]: I1205 11:08:34.743851 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:34 crc kubenswrapper[4728]: I1205 11:08:34.743873 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:34Z","lastTransitionTime":"2025-12-05T11:08:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:34 crc kubenswrapper[4728]: I1205 11:08:34.847022 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:34 crc kubenswrapper[4728]: I1205 11:08:34.847082 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:34 crc kubenswrapper[4728]: I1205 11:08:34.847094 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:34 crc kubenswrapper[4728]: I1205 11:08:34.847112 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:34 crc kubenswrapper[4728]: I1205 11:08:34.847123 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:34Z","lastTransitionTime":"2025-12-05T11:08:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:34 crc kubenswrapper[4728]: I1205 11:08:34.950274 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:34 crc kubenswrapper[4728]: I1205 11:08:34.950334 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:34 crc kubenswrapper[4728]: I1205 11:08:34.950346 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:34 crc kubenswrapper[4728]: I1205 11:08:34.950367 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:34 crc kubenswrapper[4728]: I1205 11:08:34.950382 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:34Z","lastTransitionTime":"2025-12-05T11:08:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:35 crc kubenswrapper[4728]: I1205 11:08:35.053572 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:35 crc kubenswrapper[4728]: I1205 11:08:35.053630 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:35 crc kubenswrapper[4728]: I1205 11:08:35.053648 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:35 crc kubenswrapper[4728]: I1205 11:08:35.053673 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:35 crc kubenswrapper[4728]: I1205 11:08:35.053691 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:35Z","lastTransitionTime":"2025-12-05T11:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:35 crc kubenswrapper[4728]: I1205 11:08:35.155740 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:35 crc kubenswrapper[4728]: I1205 11:08:35.155769 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:35 crc kubenswrapper[4728]: I1205 11:08:35.155777 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:35 crc kubenswrapper[4728]: I1205 11:08:35.155805 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:35 crc kubenswrapper[4728]: I1205 11:08:35.155813 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:35Z","lastTransitionTime":"2025-12-05T11:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:35 crc kubenswrapper[4728]: I1205 11:08:35.258212 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:35 crc kubenswrapper[4728]: I1205 11:08:35.258273 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:35 crc kubenswrapper[4728]: I1205 11:08:35.258292 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:35 crc kubenswrapper[4728]: I1205 11:08:35.258315 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:35 crc kubenswrapper[4728]: I1205 11:08:35.258332 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:35Z","lastTransitionTime":"2025-12-05T11:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:35 crc kubenswrapper[4728]: I1205 11:08:35.351479 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 11:08:35 crc kubenswrapper[4728]: E1205 11:08:35.351638 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 11:08:35 crc kubenswrapper[4728]: I1205 11:08:35.360910 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:35 crc kubenswrapper[4728]: I1205 11:08:35.360971 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:35 crc kubenswrapper[4728]: I1205 11:08:35.360989 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:35 crc kubenswrapper[4728]: I1205 11:08:35.361016 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:35 crc kubenswrapper[4728]: I1205 11:08:35.361032 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:35Z","lastTransitionTime":"2025-12-05T11:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:35 crc kubenswrapper[4728]: I1205 11:08:35.463215 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:35 crc kubenswrapper[4728]: I1205 11:08:35.463261 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:35 crc kubenswrapper[4728]: I1205 11:08:35.463272 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:35 crc kubenswrapper[4728]: I1205 11:08:35.463287 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:35 crc kubenswrapper[4728]: I1205 11:08:35.463299 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:35Z","lastTransitionTime":"2025-12-05T11:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:35 crc kubenswrapper[4728]: I1205 11:08:35.565624 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:35 crc kubenswrapper[4728]: I1205 11:08:35.565681 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:35 crc kubenswrapper[4728]: I1205 11:08:35.565697 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:35 crc kubenswrapper[4728]: I1205 11:08:35.565719 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:35 crc kubenswrapper[4728]: I1205 11:08:35.565738 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:35Z","lastTransitionTime":"2025-12-05T11:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:35 crc kubenswrapper[4728]: I1205 11:08:35.669206 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:35 crc kubenswrapper[4728]: I1205 11:08:35.669243 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:35 crc kubenswrapper[4728]: I1205 11:08:35.669256 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:35 crc kubenswrapper[4728]: I1205 11:08:35.669272 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:35 crc kubenswrapper[4728]: I1205 11:08:35.669282 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:35Z","lastTransitionTime":"2025-12-05T11:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:35 crc kubenswrapper[4728]: I1205 11:08:35.772182 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:35 crc kubenswrapper[4728]: I1205 11:08:35.772228 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:35 crc kubenswrapper[4728]: I1205 11:08:35.772240 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:35 crc kubenswrapper[4728]: I1205 11:08:35.772258 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:35 crc kubenswrapper[4728]: I1205 11:08:35.772272 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:35Z","lastTransitionTime":"2025-12-05T11:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:35 crc kubenswrapper[4728]: I1205 11:08:35.875065 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:35 crc kubenswrapper[4728]: I1205 11:08:35.875109 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:35 crc kubenswrapper[4728]: I1205 11:08:35.875120 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:35 crc kubenswrapper[4728]: I1205 11:08:35.875137 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:35 crc kubenswrapper[4728]: I1205 11:08:35.875148 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:35Z","lastTransitionTime":"2025-12-05T11:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:35 crc kubenswrapper[4728]: I1205 11:08:35.922011 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/99a5c711-5c13-4615-93fc-9fbf02ce54ca-metrics-certs\") pod \"network-metrics-daemon-2dq9w\" (UID: \"99a5c711-5c13-4615-93fc-9fbf02ce54ca\") " pod="openshift-multus/network-metrics-daemon-2dq9w"
Dec 05 11:08:35 crc kubenswrapper[4728]: E1205 11:08:35.922152 4728 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 05 11:08:35 crc kubenswrapper[4728]: E1205 11:08:35.922219 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/99a5c711-5c13-4615-93fc-9fbf02ce54ca-metrics-certs podName:99a5c711-5c13-4615-93fc-9fbf02ce54ca nodeName:}" failed. No retries permitted until 2025-12-05 11:08:51.922201107 +0000 UTC m=+66.064323800 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/99a5c711-5c13-4615-93fc-9fbf02ce54ca-metrics-certs") pod "network-metrics-daemon-2dq9w" (UID: "99a5c711-5c13-4615-93fc-9fbf02ce54ca") : object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 05 11:08:35 crc kubenswrapper[4728]: I1205 11:08:35.978041 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:35 crc kubenswrapper[4728]: I1205 11:08:35.978141 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:35 crc kubenswrapper[4728]: I1205 11:08:35.978190 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:35 crc kubenswrapper[4728]: I1205 11:08:35.978216 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:35 crc kubenswrapper[4728]: I1205 11:08:35.978233 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:35Z","lastTransitionTime":"2025-12-05T11:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.081299 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.081359 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.081375 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.081399 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.081414 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:36Z","lastTransitionTime":"2025-12-05T11:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.184198 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.184253 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.184266 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.184282 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.184298 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:36Z","lastTransitionTime":"2025-12-05T11:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.287013 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.287080 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.287092 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.287109 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.287121 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:36Z","lastTransitionTime":"2025-12-05T11:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.351617 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.351767 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 11:08:36 crc kubenswrapper[4728]: E1205 11:08:36.351931 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.351971 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-2dq9w" Dec 05 11:08:36 crc kubenswrapper[4728]: E1205 11:08:36.352055 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 11:08:36 crc kubenswrapper[4728]: E1205 11:08:36.352148 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2dq9w" podUID="99a5c711-5c13-4615-93fc-9fbf02ce54ca" Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.368560 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:36Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.390085 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.390125 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.390134 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.390149 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.390162 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:36Z","lastTransitionTime":"2025-12-05T11:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.401147 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"600a7698-d6af-4d3a-997c-2af7492b676c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4e303bada68d0a42afcecbe481dd6199dd2e545598d65614cee8ce1097fa3da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e0e4eacda19e748d0d3f8e1d76b38c186c2df0f81a171387de0d9016a76c719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c6351e5a4a9441b43ba3145aa818599799ad7fcdd6bf62b5d0ec08eb3a9d5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c60e02416d658dfb8deecf915952febb04b650126edae08c8bf522c50524c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://489eff9110c6e48b0e3119edf09adf0a2a48fc1fdc1bb8e81f113ab882725eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:36Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.416196 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90beec388d3137d02789bb6be800fbb0b1678f8799f133d1e48b1f636f70e8c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://780e71361f1592490b1bbc77b9a869204d3a20319d1bd6c8330d3b0fc81d197f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:36Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.431042 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:36Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.446128 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gf8np" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f292da29-a632-47aa-8bcc-2d999eaa6c11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d525f104a98170107ecc1ae96c4f5c5ff3dda2f976336c219e5a9498725380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dxmch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gf8np\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:36Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.459838 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bfa60b-fcb6-4519-abc5-c25fea50921d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d952894ef1ae2b5d6c23b0d1b85d9d2689e062674a2e4ba53719b6c7290bbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41cccfccd3b8763566f9a4453832461a74aedcf7bcc18e8c925bb20e1b620de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-w8qlp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:36Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.476151 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8pwbb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e18c7d32-4ecb-4931-931e-56a7898cb233\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d53da0e296e545d13941882c9568c12f967a274b906dafb14c0905f558a155c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\
",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f79805a42a1054302447bc55dc07df80cf19e651cd1c3e783a4614ff49f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"
containerID\\\":\\\"cri-o://f79805a42a1054302447bc55dc07df80cf19e651cd1c3e783a4614ff49f9f624\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b953abf0e1905c143f473ea9d09341d7602168ff1350c317346bef8dda9402f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b953abf0e1905c143f473ea9d09341d7602168ff1350c317346bef8dda9402f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7627aff65d1bc4a39148f098ac2e1676d41c980ad3dd2d9494f4bee7b4c0872\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7627aff65d1bc4a39148f098ac2e1676d41c980ad3dd2d9494f4bee7b4c0872\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8pwbb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:36Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.493080 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-n2qgk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f26e1600-9bcd-497a-b875-7eaed5b6fba8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b47033c6c217ce37f789d031e18e21eeb86ba85f574c6455e15c79a25bfeaecc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fqbq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6b426a3d72e8498cd10b4085d89ffa51beaa52d2dbda7c70c96769e32a20d27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fqbq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\
\"2025-12-05T11:08:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-n2qgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:36Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.494274 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.494306 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.494320 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.494336 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.494350 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:36Z","lastTransitionTime":"2025-12-05T11:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.506846 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4abe5ae7-e1e1-4771-bb01-4e62b10e074b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe411cb87a46bdaf8208ebd4162ff18f2f9a2617ca43eb635793364573eb3ea4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ca6321022609b3cd60a9e1a534cfd3e69d3f520e5252ed4ffa8f76be4af91e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ccc55c28cdfadaa191738047a8d7223cf102d79a9af4d225562a87c42cf1c8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1665078c543b335af45e2ef37840dff719d513125a747f8c07dc74ef97e07354\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:36Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.523753 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6b1dbb0-8a99-4b3b-870e-771cdaac1bac\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://adb81cae56872a55c5781abe457653330ce2284251d64366ece5f2f05055b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ab41915fb3de0296bc2ba49fe39835620c1ed216790add23ccc2c23ac718c2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c
987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://920da764fdc49312fdf906f31a1280028d5762b4b7af2b3806c9488e3dfa9d71\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd145f7ac78fc008f9b9b24a11f51d6afc11e3cfa047b8d178d11507ddde8e77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b05c55e24f53fc91a2fd2b2a34b000751799933c3cb56da1ea666a6bfb6ba13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T11:08:05Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 11:07:59.827917 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 11:07:59.829563 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2883091425/tls.crt::/tmp/serving-cert-2883091425/tls.key\\\\\\\"\\\\nI1205 11:08:05.319025 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 11:08:05.321746 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 11:08:05.321774 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 11:08:05.321847 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 11:08:05.321860 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 11:08:05.327281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 11:08:05.327310 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327322 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 11:08:05.327326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 11:08:05.327329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 11:08:05.327332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 11:08:05.327493 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 11:08:05.328771 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af8363ba45c19a07e7d11064c2e321ec28b939288d01eb82d8bd9e1ee6b93998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:36Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.539171 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://098ed82d50e963f31d1176e06249004a4cc94258aab5dabf6628dead209ae3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:36Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.550565 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:36Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.563730 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec1c7fc1e4ad2be5a7dfb1eeaa3def2a68d041e302643e875869f6c657c5f6d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:36Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.581757 4728 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62ccfb6861d12cec3082bfe61fc1976dc0889398539797aa62979b632e74c9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82bd221438311b8ba34cfc26e43e590743b257ad91407befb8f4d2e17f7ba55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aacb2f8e3ec748166a587a5c2c7c026382073d9e3b8e20caf4dc444e9cc70c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36c
dd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19041ddafe3e2fe2bb95577c9bc5e372ba443790469deb4f36634d3690a61b0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d488533307095d5c0efc56c8f32bac388226d2d5763c5c31db03c84585899f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02bc0b9bc6ed08d2162af49b31f5e66ef70f78723e4016f6be5905d1227201b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-con
troller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a80880d77c415df6c5a75840e5c7a8bc26c41913ea49d22fef48ecb50beda8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a80880d77c415df6c5a75840e5c7a8bc26c41913ea49d22fef48ecb50beda8e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T11:08:21Z\\\",\\\"message\\\":\\\"[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {43933d5e-3c3b-4ff8-8926-04ac25de450e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:43933d5e-3c3b-4ff8-8926-04ac25de450e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 11:08:20.634876 6208 obj_retry.go:365] Adding new object: *v1.Pod openshift-image-registry/node-ca-zpkw4\\\\nF1205 11:08:20.635951 6208 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:19Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-wchlf_openshift-ovn-kubernetes(1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9be8133cc8e8e518cd8a6a3ace93814e8d565d8bea2b81433ba07281ade2f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wchlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:36Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.594273 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zpkw4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ed53a3-7ee5-4d66-9e47-be49a9cd1b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71f9a74961573b2aafe3dce854836f51b027ee6abfb12bc2ff57be6524979165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqqjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zpkw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:36Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.596919 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.596982 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.596995 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.597008 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.597018 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:36Z","lastTransitionTime":"2025-12-05T11:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.604985 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2dq9w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"99a5c711-5c13-4615-93fc-9fbf02ce54ca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rq68g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rq68g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:20Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2dq9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:36Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.615806 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-85f5z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f536e7a4-ad53-442e-b7c3-8928fcd89f22\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://180e166e57042a6843516cf424020e8aab91131f7e0cc4c6f061de5e46bb0f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r72s7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-85f5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:36Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.699534 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.699570 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.699579 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.699594 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.699605 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:36Z","lastTransitionTime":"2025-12-05T11:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.803039 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.803089 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.803105 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.803125 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.803143 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:36Z","lastTransitionTime":"2025-12-05T11:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.905601 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.905680 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.905693 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.905711 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:36 crc kubenswrapper[4728]: I1205 11:08:36.905723 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:36Z","lastTransitionTime":"2025-12-05T11:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:37 crc kubenswrapper[4728]: I1205 11:08:37.007966 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:37 crc kubenswrapper[4728]: I1205 11:08:37.008007 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:37 crc kubenswrapper[4728]: I1205 11:08:37.008020 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:37 crc kubenswrapper[4728]: I1205 11:08:37.008039 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:37 crc kubenswrapper[4728]: I1205 11:08:37.008054 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:37Z","lastTransitionTime":"2025-12-05T11:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:37 crc kubenswrapper[4728]: I1205 11:08:37.110914 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:37 crc kubenswrapper[4728]: I1205 11:08:37.110961 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:37 crc kubenswrapper[4728]: I1205 11:08:37.110979 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:37 crc kubenswrapper[4728]: I1205 11:08:37.111001 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:37 crc kubenswrapper[4728]: I1205 11:08:37.111019 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:37Z","lastTransitionTime":"2025-12-05T11:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:37 crc kubenswrapper[4728]: I1205 11:08:37.214203 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:37 crc kubenswrapper[4728]: I1205 11:08:37.214257 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:37 crc kubenswrapper[4728]: I1205 11:08:37.214273 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:37 crc kubenswrapper[4728]: I1205 11:08:37.214295 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:37 crc kubenswrapper[4728]: I1205 11:08:37.214310 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:37Z","lastTransitionTime":"2025-12-05T11:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:37 crc kubenswrapper[4728]: I1205 11:08:37.317000 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:37 crc kubenswrapper[4728]: I1205 11:08:37.317067 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:37 crc kubenswrapper[4728]: I1205 11:08:37.317077 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:37 crc kubenswrapper[4728]: I1205 11:08:37.317093 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:37 crc kubenswrapper[4728]: I1205 11:08:37.317103 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:37Z","lastTransitionTime":"2025-12-05T11:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:37 crc kubenswrapper[4728]: I1205 11:08:37.351630 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:08:37 crc kubenswrapper[4728]: E1205 11:08:37.351841 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 11:08:37 crc kubenswrapper[4728]: I1205 11:08:37.419935 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:37 crc kubenswrapper[4728]: I1205 11:08:37.419968 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:37 crc kubenswrapper[4728]: I1205 11:08:37.419980 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:37 crc kubenswrapper[4728]: I1205 11:08:37.419997 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:37 crc kubenswrapper[4728]: I1205 11:08:37.420008 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:37Z","lastTransitionTime":"2025-12-05T11:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:37 crc kubenswrapper[4728]: I1205 11:08:37.522180 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:37 crc kubenswrapper[4728]: I1205 11:08:37.522237 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:37 crc kubenswrapper[4728]: I1205 11:08:37.522253 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:37 crc kubenswrapper[4728]: I1205 11:08:37.522278 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:37 crc kubenswrapper[4728]: I1205 11:08:37.522295 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:37Z","lastTransitionTime":"2025-12-05T11:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:37 crc kubenswrapper[4728]: I1205 11:08:37.624660 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:37 crc kubenswrapper[4728]: I1205 11:08:37.624701 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:37 crc kubenswrapper[4728]: I1205 11:08:37.624713 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:37 crc kubenswrapper[4728]: I1205 11:08:37.624730 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:37 crc kubenswrapper[4728]: I1205 11:08:37.624739 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:37Z","lastTransitionTime":"2025-12-05T11:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:37 crc kubenswrapper[4728]: I1205 11:08:37.726955 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:37 crc kubenswrapper[4728]: I1205 11:08:37.727018 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:37 crc kubenswrapper[4728]: I1205 11:08:37.727036 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:37 crc kubenswrapper[4728]: I1205 11:08:37.727067 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:37 crc kubenswrapper[4728]: I1205 11:08:37.727090 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:37Z","lastTransitionTime":"2025-12-05T11:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:37 crc kubenswrapper[4728]: I1205 11:08:37.829684 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:37 crc kubenswrapper[4728]: I1205 11:08:37.829724 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:37 crc kubenswrapper[4728]: I1205 11:08:37.829734 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:37 crc kubenswrapper[4728]: I1205 11:08:37.829750 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:37 crc kubenswrapper[4728]: I1205 11:08:37.829760 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:37Z","lastTransitionTime":"2025-12-05T11:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:37 crc kubenswrapper[4728]: I1205 11:08:37.932288 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:37 crc kubenswrapper[4728]: I1205 11:08:37.932343 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:37 crc kubenswrapper[4728]: I1205 11:08:37.932351 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:37 crc kubenswrapper[4728]: I1205 11:08:37.932366 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:37 crc kubenswrapper[4728]: I1205 11:08:37.932377 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:37Z","lastTransitionTime":"2025-12-05T11:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:37 crc kubenswrapper[4728]: I1205 11:08:37.940711 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:08:37 crc kubenswrapper[4728]: I1205 11:08:37.940838 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:08:37 crc kubenswrapper[4728]: I1205 11:08:37.940914 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:08:37 crc kubenswrapper[4728]: E1205 11:08:37.940990 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:09:09.940971486 +0000 UTC m=+84.083094179 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:08:37 crc kubenswrapper[4728]: E1205 11:08:37.941029 4728 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 11:08:37 crc kubenswrapper[4728]: E1205 11:08:37.941059 4728 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 11:08:37 crc kubenswrapper[4728]: E1205 11:08:37.941127 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 11:09:09.941102369 +0000 UTC m=+84.083225092 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 11:08:37 crc kubenswrapper[4728]: E1205 11:08:37.941158 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 11:09:09.9411425 +0000 UTC m=+84.083265223 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 11:08:38 crc kubenswrapper[4728]: I1205 11:08:38.034463 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:38 crc kubenswrapper[4728]: I1205 11:08:38.034502 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:38 crc kubenswrapper[4728]: I1205 11:08:38.034513 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:38 crc kubenswrapper[4728]: I1205 11:08:38.034529 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:38 crc kubenswrapper[4728]: I1205 11:08:38.034540 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:38Z","lastTransitionTime":"2025-12-05T11:08:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:38 crc kubenswrapper[4728]: I1205 11:08:38.041974 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:08:38 crc kubenswrapper[4728]: I1205 11:08:38.042052 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:08:38 crc kubenswrapper[4728]: E1205 11:08:38.042205 4728 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 11:08:38 crc kubenswrapper[4728]: E1205 11:08:38.042228 4728 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 11:08:38 crc kubenswrapper[4728]: E1205 11:08:38.042246 4728 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 11:08:38 crc kubenswrapper[4728]: E1205 11:08:38.042315 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 11:09:10.042294579 +0000 UTC m=+84.184417292 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 11:08:38 crc kubenswrapper[4728]: E1205 11:08:38.042343 4728 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 11:08:38 crc kubenswrapper[4728]: E1205 11:08:38.042400 4728 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 11:08:38 crc kubenswrapper[4728]: E1205 11:08:38.042416 4728 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 11:08:38 crc kubenswrapper[4728]: E1205 11:08:38.042480 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 11:09:10.042459383 +0000 UTC m=+84.184582126 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 11:08:38 crc kubenswrapper[4728]: I1205 11:08:38.136987 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:38 crc kubenswrapper[4728]: I1205 11:08:38.137029 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:38 crc kubenswrapper[4728]: I1205 11:08:38.137038 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:38 crc kubenswrapper[4728]: I1205 11:08:38.137053 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:38 crc kubenswrapper[4728]: I1205 11:08:38.137063 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:38Z","lastTransitionTime":"2025-12-05T11:08:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:38 crc kubenswrapper[4728]: I1205 11:08:38.240200 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:38 crc kubenswrapper[4728]: I1205 11:08:38.240255 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:38 crc kubenswrapper[4728]: I1205 11:08:38.240272 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:38 crc kubenswrapper[4728]: I1205 11:08:38.240295 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:38 crc kubenswrapper[4728]: I1205 11:08:38.240311 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:38Z","lastTransitionTime":"2025-12-05T11:08:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:38 crc kubenswrapper[4728]: I1205 11:08:38.342503 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:38 crc kubenswrapper[4728]: I1205 11:08:38.342539 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:38 crc kubenswrapper[4728]: I1205 11:08:38.342548 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:38 crc kubenswrapper[4728]: I1205 11:08:38.342565 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:38 crc kubenswrapper[4728]: I1205 11:08:38.342577 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:38Z","lastTransitionTime":"2025-12-05T11:08:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:38 crc kubenswrapper[4728]: I1205 11:08:38.351433 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2dq9w" Dec 05 11:08:38 crc kubenswrapper[4728]: I1205 11:08:38.351448 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:08:38 crc kubenswrapper[4728]: E1205 11:08:38.351569 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2dq9w" podUID="99a5c711-5c13-4615-93fc-9fbf02ce54ca" Dec 05 11:08:38 crc kubenswrapper[4728]: I1205 11:08:38.351695 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:08:38 crc kubenswrapper[4728]: E1205 11:08:38.352807 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 11:08:38 crc kubenswrapper[4728]: E1205 11:08:38.352762 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 11:08:38 crc kubenswrapper[4728]: I1205 11:08:38.445247 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:38 crc kubenswrapper[4728]: I1205 11:08:38.445289 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:38 crc kubenswrapper[4728]: I1205 11:08:38.445300 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:38 crc kubenswrapper[4728]: I1205 11:08:38.445316 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:38 crc kubenswrapper[4728]: I1205 11:08:38.445326 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:38Z","lastTransitionTime":"2025-12-05T11:08:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:38 crc kubenswrapper[4728]: I1205 11:08:38.549167 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:38 crc kubenswrapper[4728]: I1205 11:08:38.549240 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:38 crc kubenswrapper[4728]: I1205 11:08:38.549264 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:38 crc kubenswrapper[4728]: I1205 11:08:38.549292 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:38 crc kubenswrapper[4728]: I1205 11:08:38.549314 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:38Z","lastTransitionTime":"2025-12-05T11:08:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:38 crc kubenswrapper[4728]: I1205 11:08:38.651948 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:38 crc kubenswrapper[4728]: I1205 11:08:38.651989 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:38 crc kubenswrapper[4728]: I1205 11:08:38.652003 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:38 crc kubenswrapper[4728]: I1205 11:08:38.652022 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:38 crc kubenswrapper[4728]: I1205 11:08:38.652034 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:38Z","lastTransitionTime":"2025-12-05T11:08:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:38 crc kubenswrapper[4728]: I1205 11:08:38.754062 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:38 crc kubenswrapper[4728]: I1205 11:08:38.754106 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:38 crc kubenswrapper[4728]: I1205 11:08:38.754117 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:38 crc kubenswrapper[4728]: I1205 11:08:38.754133 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:38 crc kubenswrapper[4728]: I1205 11:08:38.754144 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:38Z","lastTransitionTime":"2025-12-05T11:08:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:38 crc kubenswrapper[4728]: I1205 11:08:38.856452 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:38 crc kubenswrapper[4728]: I1205 11:08:38.856504 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:38 crc kubenswrapper[4728]: I1205 11:08:38.856521 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:38 crc kubenswrapper[4728]: I1205 11:08:38.856543 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:38 crc kubenswrapper[4728]: I1205 11:08:38.856559 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:38Z","lastTransitionTime":"2025-12-05T11:08:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:38 crc kubenswrapper[4728]: I1205 11:08:38.959268 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:38 crc kubenswrapper[4728]: I1205 11:08:38.959324 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:38 crc kubenswrapper[4728]: I1205 11:08:38.959335 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:38 crc kubenswrapper[4728]: I1205 11:08:38.959353 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:38 crc kubenswrapper[4728]: I1205 11:08:38.959366 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:38Z","lastTransitionTime":"2025-12-05T11:08:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:39 crc kubenswrapper[4728]: I1205 11:08:39.061462 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:39 crc kubenswrapper[4728]: I1205 11:08:39.061504 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:39 crc kubenswrapper[4728]: I1205 11:08:39.061514 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:39 crc kubenswrapper[4728]: I1205 11:08:39.061527 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:39 crc kubenswrapper[4728]: I1205 11:08:39.061543 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:39Z","lastTransitionTime":"2025-12-05T11:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:39 crc kubenswrapper[4728]: I1205 11:08:39.164503 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:39 crc kubenswrapper[4728]: I1205 11:08:39.164549 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:39 crc kubenswrapper[4728]: I1205 11:08:39.164560 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:39 crc kubenswrapper[4728]: I1205 11:08:39.164577 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:39 crc kubenswrapper[4728]: I1205 11:08:39.164589 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:39Z","lastTransitionTime":"2025-12-05T11:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:39 crc kubenswrapper[4728]: I1205 11:08:39.267484 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:39 crc kubenswrapper[4728]: I1205 11:08:39.267550 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:39 crc kubenswrapper[4728]: I1205 11:08:39.267573 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:39 crc kubenswrapper[4728]: I1205 11:08:39.267602 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:39 crc kubenswrapper[4728]: I1205 11:08:39.267624 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:39Z","lastTransitionTime":"2025-12-05T11:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:39 crc kubenswrapper[4728]: I1205 11:08:39.352023 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:08:39 crc kubenswrapper[4728]: E1205 11:08:39.352213 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 11:08:39 crc kubenswrapper[4728]: I1205 11:08:39.370192 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:39 crc kubenswrapper[4728]: I1205 11:08:39.370250 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:39 crc kubenswrapper[4728]: I1205 11:08:39.370267 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:39 crc kubenswrapper[4728]: I1205 11:08:39.370291 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:39 crc kubenswrapper[4728]: I1205 11:08:39.370308 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:39Z","lastTransitionTime":"2025-12-05T11:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:39 crc kubenswrapper[4728]: I1205 11:08:39.473861 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:39 crc kubenswrapper[4728]: I1205 11:08:39.473942 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:39 crc kubenswrapper[4728]: I1205 11:08:39.473964 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:39 crc kubenswrapper[4728]: I1205 11:08:39.473993 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:39 crc kubenswrapper[4728]: I1205 11:08:39.474017 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:39Z","lastTransitionTime":"2025-12-05T11:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:39 crc kubenswrapper[4728]: I1205 11:08:39.576668 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:39 crc kubenswrapper[4728]: I1205 11:08:39.576719 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:39 crc kubenswrapper[4728]: I1205 11:08:39.576732 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:39 crc kubenswrapper[4728]: I1205 11:08:39.576749 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:39 crc kubenswrapper[4728]: I1205 11:08:39.576761 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:39Z","lastTransitionTime":"2025-12-05T11:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:39 crc kubenswrapper[4728]: I1205 11:08:39.679089 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:39 crc kubenswrapper[4728]: I1205 11:08:39.679160 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:39 crc kubenswrapper[4728]: I1205 11:08:39.679182 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:39 crc kubenswrapper[4728]: I1205 11:08:39.679213 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:39 crc kubenswrapper[4728]: I1205 11:08:39.679240 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:39Z","lastTransitionTime":"2025-12-05T11:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:39 crc kubenswrapper[4728]: I1205 11:08:39.782415 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:39 crc kubenswrapper[4728]: I1205 11:08:39.782485 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:39 crc kubenswrapper[4728]: I1205 11:08:39.782503 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:39 crc kubenswrapper[4728]: I1205 11:08:39.782532 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:39 crc kubenswrapper[4728]: I1205 11:08:39.782546 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:39Z","lastTransitionTime":"2025-12-05T11:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:39 crc kubenswrapper[4728]: I1205 11:08:39.885885 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:39 crc kubenswrapper[4728]: I1205 11:08:39.885955 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:39 crc kubenswrapper[4728]: I1205 11:08:39.885972 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:39 crc kubenswrapper[4728]: I1205 11:08:39.885997 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:39 crc kubenswrapper[4728]: I1205 11:08:39.886015 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:39Z","lastTransitionTime":"2025-12-05T11:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:39 crc kubenswrapper[4728]: I1205 11:08:39.979663 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:39 crc kubenswrapper[4728]: I1205 11:08:39.979725 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:39 crc kubenswrapper[4728]: I1205 11:08:39.979740 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:39 crc kubenswrapper[4728]: I1205 11:08:39.979762 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:39 crc kubenswrapper[4728]: I1205 11:08:39.979775 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:39Z","lastTransitionTime":"2025-12-05T11:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:39 crc kubenswrapper[4728]: E1205 11:08:39.996869 4728 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"feb38e4d-326c-4c7a-a272-95e0ac54f009\\\",\\\"systemUUID\\\":\\\"65b68dc7-92a1-4fa1-bbc7-423a936860c6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:39Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.002222 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.002401 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.002427 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.002501 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.002528 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:40Z","lastTransitionTime":"2025-12-05T11:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:40 crc kubenswrapper[4728]: E1205 11:08:40.022115 4728 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"feb38e4d-326c-4c7a-a272-95e0ac54f009\\\",\\\"systemUUID\\\":\\\"65b68dc7-92a1-4fa1-bbc7-423a936860c6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:40Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.026370 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.026414 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.026430 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.026450 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.026466 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:40Z","lastTransitionTime":"2025-12-05T11:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:40 crc kubenswrapper[4728]: E1205 11:08:40.041052 4728 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"feb38e4d-326c-4c7a-a272-95e0ac54f009\\\",\\\"systemUUID\\\":\\\"65b68dc7-92a1-4fa1-bbc7-423a936860c6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:40Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.045749 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.045775 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
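The same multi-kilobyte status patch is re-logged on every retry above. To inspect one of these entries without wading through the image list, a small stand-alone helper along the following lines can pull out just the node conditions. This is an illustrative sketch, not kubelet code, and it assumes the escaping style exactly as it appears in this capture (each JSON quote inside the patch written as \\\"):

    import json
    import re

    def node_conditions(raw_line: str):
        """Return the conditions list from one 'Error updating node status' entry."""
        # The patch sits between: failed to patch status \"{ ... }\" for node
        m = re.search(r'failed to patch status \\"(\{.*\})\\" for node', raw_line)
        if m is None:
            raise ValueError("not a status-patch error entry")
        # Undo the capture's escaping: each JSON quote appears as \\\" here.
        patch = json.loads(m.group(1).replace('\\\\\\"', '"'))
        return patch["status"]["conditions"]

    # Usage: feed one full entry (e.g. the first, un-elided attempt in this log).
    # for cond in node_conditions(line):
    #     print(cond["type"], cond["status"], cond["reason"])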
event="NodeHasNoDiskPressure" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.045783 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.045827 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.045843 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:40Z","lastTransitionTime":"2025-12-05T11:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:40 crc kubenswrapper[4728]: E1205 11:08:40.060431 4728 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"feb38e4d-326c-4c7a-a272-95e0ac54f009\\\",\\\"systemUUID\\\":\\\"65b68dc7-92a1-4fa1-bbc7-423a936860c6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:40Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.063840 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.063877 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.063890 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.063907 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.063919 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:40Z","lastTransitionTime":"2025-12-05T11:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:40 crc kubenswrapper[4728]: E1205 11:08:40.081587 4728 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"feb38e4d-326c-4c7a-a272-95e0ac54f009\\\",\\\"systemUUID\\\":\\\"65b68dc7-92a1-4fa1-bbc7-423a936860c6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:40Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:40 crc kubenswrapper[4728]: E1205 11:08:40.081756 4728 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.083713 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
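Every retry above fails for the same root cause: the serving certificate of the node.network-node-identity.openshift.io webhook at 127.0.0.1:9743 expired on 2025-08-24T17:21:41Z, long before the current time of 2025-12-05. The failure can be reproduced off-log with a plain TLS handshake; the following is a minimal sketch, assuming it runs on the node itself and that /tmp/webhook-ca.pem is a hypothetical copy of the CA that signed the webhook certificate (without the right CA the handshake fails on trust before expiry is ever reported):

    import socket
    import ssl

    CA_FILE = "/tmp/webhook-ca.pem"  # hypothetical path; place the signing CA here

    ctx = ssl.create_default_context(cafile=CA_FILE)
    ctx.check_hostname = False  # the kubelet dials a bare IP, not a hostname

    try:
        with socket.create_connection(("127.0.0.1", 9743), timeout=5) as sock:
            with ctx.wrap_socket(sock) as tls:
                # A completed handshake means the chain verified; notAfter shows
                # how long the serving certificate remains valid.
                print("certificate OK, notAfter =", tls.getpeercert()["notAfter"])
    except ssl.SSLCertVerificationError as err:
        # An expired serving certificate lands here with the same x509 text the
        # kubelet logs: "certificate has expired or is not yet valid".
        print("verification failed:", err)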
event="NodeHasSufficientMemory" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.083762 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.083805 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.083826 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.083843 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:40Z","lastTransitionTime":"2025-12-05T11:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.186858 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.186924 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.186937 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.186958 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.186971 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:40Z","lastTransitionTime":"2025-12-05T11:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.290325 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.290386 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.290408 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.290437 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.290456 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:40Z","lastTransitionTime":"2025-12-05T11:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.351780 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.351857 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.351955 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2dq9w" Dec 05 11:08:40 crc kubenswrapper[4728]: E1205 11:08:40.352013 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 11:08:40 crc kubenswrapper[4728]: E1205 11:08:40.352191 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2dq9w" podUID="99a5c711-5c13-4615-93fc-9fbf02ce54ca" Dec 05 11:08:40 crc kubenswrapper[4728]: E1205 11:08:40.352288 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.393327 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.393388 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.393406 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.393470 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.393490 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:40Z","lastTransitionTime":"2025-12-05T11:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.496297 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.496349 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.496362 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.496382 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.496396 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:40Z","lastTransitionTime":"2025-12-05T11:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.599548 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.599584 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.599592 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.599606 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.599616 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:40Z","lastTransitionTime":"2025-12-05T11:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.624084 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.639640 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.650873 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8pwbb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e18c7d32-4ecb-4931-931e-56a7898cb233\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d53da0e296e545d13941882c9568c12f967a274b906dafb14c0905f558a155c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"nam
e\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f79805a42a1054302447bc55dc07df80cf19e651cd1c3e783a4614ff49f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\
\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f79805a42a1054302447bc55dc07df80cf19e651cd1c3e783a4614ff49f9f624\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b953abf0e1905c143f473ea9d09341d7602168ff1350c317346bef8dda9402f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b953abf0e1905c143f473ea9d09341d7602168ff1350c317346bef8dda9402f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7627aff65d1bc4a39148f098ac2e1676d41c980ad3dd2d9494f4bee7b4c0872\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7627aff65d1bc4a39148f098ac2e1676d41c980ad3dd2d9494f4bee7b4c0872\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8pwbb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:40Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.667772 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-n2qgk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f26e1600-9bcd-497a-b875-7eaed5b6fba8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b47033c6c217ce37f789d031e18e21eeb86ba85f574c6455e15c79a25bfeaecc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fqbq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6b426a3d72e8498cd10b4085d89ffa51beaa52d2dbda7c70c96769e32a20d27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fqbq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-n2qgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:40Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.687461 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4abe5ae7-e1e1-4771-bb01-4e62b10e074b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe411cb87a46bdaf8208ebd4162ff18f2f9a2617ca43eb635793364573eb3ea4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ca6321022609b3cd60a9e1a534cfd3e69d3f520e5252ed4ffa8f76be4af91e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ccc55c28cdfadaa191738047a8d7223cf102d79a9af4d225562a87c42cf1c8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manag
er-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1665078c543b335af45e2ef37840dff719d513125a747f8c07dc74ef97e07354\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:40Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.701929 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.701966 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.701974 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.701988 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.701996 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:40Z","lastTransitionTime":"2025-12-05T11:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.704029 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6b1dbb0-8a99-4b3b-870e-771cdaac1bac\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://adb81cae56872a55c5781abe457653330ce2284251d64366ece5f2f05055b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ab41915fb3de0296bc2ba49fe39835620c1ed216790add23ccc2c23ac718c2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://920da764fdc49312fdf906f31a1280028d5762b4b7af2b3806c9488e3dfa9d71\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd145f7ac78fc008f9b9b24a11f51d6afc11e3cfa047b8d178d11507ddde8e77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b05c55e24f53fc91a2fd2b2a34b000751799933c3cb56da1ea666a6bfb6ba13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T11:08:05Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 11:07:59.827917 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 11:07:59.829563 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2883091425/tls.crt::/tmp/serving-cert-2883091425/tls.key\\\\\\\"\\\\nI1205 11:08:05.319025 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 11:08:05.321746 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 11:08:05.321774 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 11:08:05.321847 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 11:08:05.321860 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 11:08:05.327281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 11:08:05.327310 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 11:08:05.327326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 11:08:05.327329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 11:08:05.327332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 11:08:05.327493 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 11:08:05.328771 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af8363ba45c19a07e7d11064c2e321ec28b939288d01eb82d8bd9e1ee6b93998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:40Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.717976 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://098ed82d50e963f31d1176e06249004a4cc94258aab5dabf6628dead209ae3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:40Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.731512 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gf8np" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f292da29-a632-47aa-8bcc-2d999eaa6c11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d525f104a98170107ecc1ae96c4f5c5ff3dda2f976336c219e5a9498725380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dxmch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gf8np\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:40Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.747174 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bfa60b-fcb6-4519-abc5-c25fea50921d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d952894ef1ae2b5d6c23b0d1b85d9d2689e062674a2e4ba53719b6c7290bbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41cccfccd3b8763566f9a4453832461a74aedcf7bcc18e8c925bb20e1b620de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-w8qlp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:40Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.769021 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62ccfb6861d12cec3082bfe61fc1976dc0889398539797aa62979b632e74c9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82bd221438311b8ba34cfc26e43e590743b257ad91407befb8f4d2e17f7ba55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\
\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aacb2f8e3ec748166a587a5c2c7c026382073d9e3b8e20caf4dc444e9cc70c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19041ddafe3e2fe2bb95577c9bc5e372ba443790469deb4f36634d3690a61b0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d488533307095d5c0efc56c8f32bac388226d2d5763c5c31db03c84585899f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-acce
ss-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02bc0b9bc6ed08d2162af49b31f5e66ef70f78723e4016f6be5905d1227201b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a80880d77c415df6c5a75840e5c7a8bc26c41913ea49d22fef48ecb50beda8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a80880d77c415df6c5a75840e5c7a8bc26c41913ea49d22fef48ecb50beda8e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T11:08:21Z\\\",\\\"message\\\":\\\"[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {43933d5e-3c3b-4ff8-8926-04ac25de450e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:43933d5e-3c3b-4ff8-8926-04ac25de450e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 11:08:20.634876 6208 obj_retry.go:365] Adding new object: *v1.Pod openshift-image-registry/node-ca-zpkw4\\\\nF1205 11:08:20.635951 6208 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call 
webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:19Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-wchlf_openshift-ovn-kubernetes(1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9be8133cc8e8e518cd8a6a3ace93814e8d565d8bea2b81433ba07281ade2f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPat
h\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wchlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:40Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.782918 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zpkw4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ed53a3-7ee5-4d66-9e47-be49a9cd1b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71f9a74961573b2aafe3dce854836f51b027ee6abfb12bc2ff57be6524979165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqqjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zpkw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:40Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.798133 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2dq9w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"99a5c711-5c13-4615-93fc-9fbf02ce54ca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rq68g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rq68g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:20Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2dq9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:40Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.805351 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.805396 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.805411 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.805432 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.805449 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:40Z","lastTransitionTime":"2025-12-05T11:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.810093 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-85f5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f536e7a4-ad53-442e-b7c3-8928fcd89f22\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://180e166e57042a6843516cf424020e8aab91131f7e0cc4c6f061de5e46bb0f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r72s7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-85f5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:40Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.823937 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:40Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.840785 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec1c7fc1e4ad2be5a7dfb1eeaa3def2a68d041e302643e875869f6c657c5f6d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:40Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.862514 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"600a7698-d6af-4d3a-997c-2af7492b676c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4e303bada68d0a42afcecbe481dd6199dd2e545598d65614cee8ce1097fa3da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e0e4eacda19e748d0d3f8e1d76b38c186c2df0f81a171387de0d9016a76c719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c6351e5a4a9441b43ba3145aa818599799ad7fcdd6bf62b5d0ec08eb3a9d5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c60e02416d658dfb8deecf915952febb04b650
126edae08c8bf522c50524c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://489eff9110c6e48b0e3119edf09adf0a2a48fc1fdc1bb8e81f113ab882725eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:40Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.880780 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90beec388d3137d02789bb6be800fbb0b1678f8799f133d1e48b1f636f70e8c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://780e71361f1592490b1bbc77b9a869204d3a20319d1bd6c8330d3b0fc81d197f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:40Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.899168 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:40Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.908083 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.908138 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.908157 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.908178 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.908195 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:40Z","lastTransitionTime":"2025-12-05T11:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:40 crc kubenswrapper[4728]: I1205 11:08:40.918738 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:40Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:41 crc kubenswrapper[4728]: I1205 11:08:41.011385 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:41 crc kubenswrapper[4728]: I1205 11:08:41.011445 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:41 crc kubenswrapper[4728]: I1205 11:08:41.011461 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:41 crc kubenswrapper[4728]: I1205 11:08:41.011482 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:41 crc kubenswrapper[4728]: I1205 11:08:41.011499 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:41Z","lastTransitionTime":"2025-12-05T11:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration 
file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:41 crc kubenswrapper[4728]: I1205 11:08:41.114753 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:41 crc kubenswrapper[4728]: I1205 11:08:41.114839 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:41 crc kubenswrapper[4728]: I1205 11:08:41.114859 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:41 crc kubenswrapper[4728]: I1205 11:08:41.114882 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:41 crc kubenswrapper[4728]: I1205 11:08:41.114899 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:41Z","lastTransitionTime":"2025-12-05T11:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:41 crc kubenswrapper[4728]: I1205 11:08:41.217402 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:41 crc kubenswrapper[4728]: I1205 11:08:41.217453 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:41 crc kubenswrapper[4728]: I1205 11:08:41.217476 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:41 crc kubenswrapper[4728]: I1205 11:08:41.217500 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:41 crc kubenswrapper[4728]: I1205 11:08:41.217518 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:41Z","lastTransitionTime":"2025-12-05T11:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:41 crc kubenswrapper[4728]: I1205 11:08:41.320529 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:41 crc kubenswrapper[4728]: I1205 11:08:41.320596 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:41 crc kubenswrapper[4728]: I1205 11:08:41.320610 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:41 crc kubenswrapper[4728]: I1205 11:08:41.320629 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:41 crc kubenswrapper[4728]: I1205 11:08:41.320644 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:41Z","lastTransitionTime":"2025-12-05T11:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:41 crc kubenswrapper[4728]: I1205 11:08:41.351003 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:08:41 crc kubenswrapper[4728]: E1205 11:08:41.351153 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 11:08:41 crc kubenswrapper[4728]: I1205 11:08:41.423894 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:41 crc kubenswrapper[4728]: I1205 11:08:41.423949 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:41 crc kubenswrapper[4728]: I1205 11:08:41.423962 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:41 crc kubenswrapper[4728]: I1205 11:08:41.423986 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:41 crc kubenswrapper[4728]: I1205 11:08:41.423999 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:41Z","lastTransitionTime":"2025-12-05T11:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:41 crc kubenswrapper[4728]: I1205 11:08:41.530087 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:41 crc kubenswrapper[4728]: I1205 11:08:41.530588 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:41 crc kubenswrapper[4728]: I1205 11:08:41.530868 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:41 crc kubenswrapper[4728]: I1205 11:08:41.531110 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:41 crc kubenswrapper[4728]: I1205 11:08:41.531199 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:41Z","lastTransitionTime":"2025-12-05T11:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:41 crc kubenswrapper[4728]: I1205 11:08:41.635782 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:41 crc kubenswrapper[4728]: I1205 11:08:41.635854 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:41 crc kubenswrapper[4728]: I1205 11:08:41.635864 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:41 crc kubenswrapper[4728]: I1205 11:08:41.635887 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:41 crc kubenswrapper[4728]: I1205 11:08:41.635899 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:41Z","lastTransitionTime":"2025-12-05T11:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:41 crc kubenswrapper[4728]: I1205 11:08:41.738839 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:41 crc kubenswrapper[4728]: I1205 11:08:41.738917 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:41 crc kubenswrapper[4728]: I1205 11:08:41.738931 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:41 crc kubenswrapper[4728]: I1205 11:08:41.738956 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:41 crc kubenswrapper[4728]: I1205 11:08:41.738972 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:41Z","lastTransitionTime":"2025-12-05T11:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:41 crc kubenswrapper[4728]: I1205 11:08:41.842363 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:41 crc kubenswrapper[4728]: I1205 11:08:41.842419 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:41 crc kubenswrapper[4728]: I1205 11:08:41.842430 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:41 crc kubenswrapper[4728]: I1205 11:08:41.842453 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:41 crc kubenswrapper[4728]: I1205 11:08:41.842466 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:41Z","lastTransitionTime":"2025-12-05T11:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:41 crc kubenswrapper[4728]: I1205 11:08:41.946192 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:41 crc kubenswrapper[4728]: I1205 11:08:41.946291 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:41 crc kubenswrapper[4728]: I1205 11:08:41.946310 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:41 crc kubenswrapper[4728]: I1205 11:08:41.946339 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:41 crc kubenswrapper[4728]: I1205 11:08:41.946361 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:41Z","lastTransitionTime":"2025-12-05T11:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:42 crc kubenswrapper[4728]: I1205 11:08:42.049236 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:42 crc kubenswrapper[4728]: I1205 11:08:42.049302 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:42 crc kubenswrapper[4728]: I1205 11:08:42.049317 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:42 crc kubenswrapper[4728]: I1205 11:08:42.049344 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:42 crc kubenswrapper[4728]: I1205 11:08:42.049359 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:42Z","lastTransitionTime":"2025-12-05T11:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:42 crc kubenswrapper[4728]: I1205 11:08:42.153002 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:42 crc kubenswrapper[4728]: I1205 11:08:42.153090 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:42 crc kubenswrapper[4728]: I1205 11:08:42.153109 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:42 crc kubenswrapper[4728]: I1205 11:08:42.153131 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:42 crc kubenswrapper[4728]: I1205 11:08:42.153148 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:42Z","lastTransitionTime":"2025-12-05T11:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:42 crc kubenswrapper[4728]: I1205 11:08:42.256304 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:42 crc kubenswrapper[4728]: I1205 11:08:42.256358 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:42 crc kubenswrapper[4728]: I1205 11:08:42.256373 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:42 crc kubenswrapper[4728]: I1205 11:08:42.256393 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:42 crc kubenswrapper[4728]: I1205 11:08:42.256409 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:42Z","lastTransitionTime":"2025-12-05T11:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:42 crc kubenswrapper[4728]: I1205 11:08:42.351816 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:08:42 crc kubenswrapper[4728]: I1205 11:08:42.351922 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:08:42 crc kubenswrapper[4728]: E1205 11:08:42.351979 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 11:08:42 crc kubenswrapper[4728]: I1205 11:08:42.352032 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-2dq9w"
Dec 05 11:08:42 crc kubenswrapper[4728]: E1205 11:08:42.352387 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 11:08:42 crc kubenswrapper[4728]: E1205 11:08:42.352441 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2dq9w" podUID="99a5c711-5c13-4615-93fc-9fbf02ce54ca"
Dec 05 11:08:42 crc kubenswrapper[4728]: I1205 11:08:42.359177 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:42 crc kubenswrapper[4728]: I1205 11:08:42.359232 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:42 crc kubenswrapper[4728]: I1205 11:08:42.359252 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:42 crc kubenswrapper[4728]: I1205 11:08:42.359276 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:42 crc kubenswrapper[4728]: I1205 11:08:42.359295 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:42Z","lastTransitionTime":"2025-12-05T11:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:42 crc kubenswrapper[4728]: I1205 11:08:42.462262 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:42 crc kubenswrapper[4728]: I1205 11:08:42.462321 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:42 crc kubenswrapper[4728]: I1205 11:08:42.462330 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:42 crc kubenswrapper[4728]: I1205 11:08:42.462350 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:42 crc kubenswrapper[4728]: I1205 11:08:42.462364 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:42Z","lastTransitionTime":"2025-12-05T11:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:42 crc kubenswrapper[4728]: I1205 11:08:42.565041 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:42 crc kubenswrapper[4728]: I1205 11:08:42.565097 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:42 crc kubenswrapper[4728]: I1205 11:08:42.565113 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:42 crc kubenswrapper[4728]: I1205 11:08:42.565137 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:42 crc kubenswrapper[4728]: I1205 11:08:42.565155 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:42Z","lastTransitionTime":"2025-12-05T11:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:42 crc kubenswrapper[4728]: I1205 11:08:42.668204 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:42 crc kubenswrapper[4728]: I1205 11:08:42.668275 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:42 crc kubenswrapper[4728]: I1205 11:08:42.668292 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:42 crc kubenswrapper[4728]: I1205 11:08:42.668317 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:42 crc kubenswrapper[4728]: I1205 11:08:42.668334 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:42Z","lastTransitionTime":"2025-12-05T11:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:42 crc kubenswrapper[4728]: I1205 11:08:42.771198 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:42 crc kubenswrapper[4728]: I1205 11:08:42.771273 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:42 crc kubenswrapper[4728]: I1205 11:08:42.771295 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:42 crc kubenswrapper[4728]: I1205 11:08:42.771323 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:42 crc kubenswrapper[4728]: I1205 11:08:42.771343 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:42Z","lastTransitionTime":"2025-12-05T11:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:42 crc kubenswrapper[4728]: I1205 11:08:42.874715 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:42 crc kubenswrapper[4728]: I1205 11:08:42.874863 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:42 crc kubenswrapper[4728]: I1205 11:08:42.874888 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:42 crc kubenswrapper[4728]: I1205 11:08:42.874940 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:42 crc kubenswrapper[4728]: I1205 11:08:42.874966 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:42Z","lastTransitionTime":"2025-12-05T11:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:42 crc kubenswrapper[4728]: I1205 11:08:42.977863 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:42 crc kubenswrapper[4728]: I1205 11:08:42.977952 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:42 crc kubenswrapper[4728]: I1205 11:08:42.977974 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:42 crc kubenswrapper[4728]: I1205 11:08:42.978002 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:42 crc kubenswrapper[4728]: I1205 11:08:42.978024 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:42Z","lastTransitionTime":"2025-12-05T11:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:43 crc kubenswrapper[4728]: I1205 11:08:43.081006 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:43 crc kubenswrapper[4728]: I1205 11:08:43.081079 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:43 crc kubenswrapper[4728]: I1205 11:08:43.081102 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:43 crc kubenswrapper[4728]: I1205 11:08:43.081129 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:43 crc kubenswrapper[4728]: I1205 11:08:43.081147 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:43Z","lastTransitionTime":"2025-12-05T11:08:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:43 crc kubenswrapper[4728]: I1205 11:08:43.183839 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:43 crc kubenswrapper[4728]: I1205 11:08:43.183893 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:43 crc kubenswrapper[4728]: I1205 11:08:43.183904 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:43 crc kubenswrapper[4728]: I1205 11:08:43.183921 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:43 crc kubenswrapper[4728]: I1205 11:08:43.183931 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:43Z","lastTransitionTime":"2025-12-05T11:08:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:43 crc kubenswrapper[4728]: I1205 11:08:43.286177 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:43 crc kubenswrapper[4728]: I1205 11:08:43.286248 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:43 crc kubenswrapper[4728]: I1205 11:08:43.286271 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:43 crc kubenswrapper[4728]: I1205 11:08:43.286297 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:43 crc kubenswrapper[4728]: I1205 11:08:43.286315 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:43Z","lastTransitionTime":"2025-12-05T11:08:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:43 crc kubenswrapper[4728]: I1205 11:08:43.351614 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 11:08:43 crc kubenswrapper[4728]: E1205 11:08:43.351843 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 11:08:43 crc kubenswrapper[4728]: I1205 11:08:43.388634 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:43 crc kubenswrapper[4728]: I1205 11:08:43.388697 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:43 crc kubenswrapper[4728]: I1205 11:08:43.388709 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:43 crc kubenswrapper[4728]: I1205 11:08:43.388725 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:43 crc kubenswrapper[4728]: I1205 11:08:43.388738 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:43Z","lastTransitionTime":"2025-12-05T11:08:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:43 crc kubenswrapper[4728]: I1205 11:08:43.490720 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:43 crc kubenswrapper[4728]: I1205 11:08:43.490759 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:43 crc kubenswrapper[4728]: I1205 11:08:43.490770 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:43 crc kubenswrapper[4728]: I1205 11:08:43.490786 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:43 crc kubenswrapper[4728]: I1205 11:08:43.490811 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:43Z","lastTransitionTime":"2025-12-05T11:08:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:43 crc kubenswrapper[4728]: I1205 11:08:43.594242 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:43 crc kubenswrapper[4728]: I1205 11:08:43.594320 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:43 crc kubenswrapper[4728]: I1205 11:08:43.594343 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:43 crc kubenswrapper[4728]: I1205 11:08:43.594372 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:43 crc kubenswrapper[4728]: I1205 11:08:43.594393 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:43Z","lastTransitionTime":"2025-12-05T11:08:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:43 crc kubenswrapper[4728]: I1205 11:08:43.698649 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:43 crc kubenswrapper[4728]: I1205 11:08:43.698713 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:43 crc kubenswrapper[4728]: I1205 11:08:43.698730 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:43 crc kubenswrapper[4728]: I1205 11:08:43.698753 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:43 crc kubenswrapper[4728]: I1205 11:08:43.698773 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:43Z","lastTransitionTime":"2025-12-05T11:08:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:43 crc kubenswrapper[4728]: I1205 11:08:43.801426 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:43 crc kubenswrapper[4728]: I1205 11:08:43.801464 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:43 crc kubenswrapper[4728]: I1205 11:08:43.801485 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:43 crc kubenswrapper[4728]: I1205 11:08:43.801500 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:43 crc kubenswrapper[4728]: I1205 11:08:43.801510 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:43Z","lastTransitionTime":"2025-12-05T11:08:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:43 crc kubenswrapper[4728]: I1205 11:08:43.903675 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:43 crc kubenswrapper[4728]: I1205 11:08:43.903710 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:43 crc kubenswrapper[4728]: I1205 11:08:43.903720 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:43 crc kubenswrapper[4728]: I1205 11:08:43.903734 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:43 crc kubenswrapper[4728]: I1205 11:08:43.903744 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:43Z","lastTransitionTime":"2025-12-05T11:08:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:44 crc kubenswrapper[4728]: I1205 11:08:44.006164 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:44 crc kubenswrapper[4728]: I1205 11:08:44.006219 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:44 crc kubenswrapper[4728]: I1205 11:08:44.006230 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:44 crc kubenswrapper[4728]: I1205 11:08:44.006249 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:44 crc kubenswrapper[4728]: I1205 11:08:44.006260 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:44Z","lastTransitionTime":"2025-12-05T11:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:44 crc kubenswrapper[4728]: I1205 11:08:44.110505 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:44 crc kubenswrapper[4728]: I1205 11:08:44.110601 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:44 crc kubenswrapper[4728]: I1205 11:08:44.110621 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:44 crc kubenswrapper[4728]: I1205 11:08:44.110649 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:44 crc kubenswrapper[4728]: I1205 11:08:44.110662 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:44Z","lastTransitionTime":"2025-12-05T11:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:44 crc kubenswrapper[4728]: I1205 11:08:44.214588 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:44 crc kubenswrapper[4728]: I1205 11:08:44.214637 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:44 crc kubenswrapper[4728]: I1205 11:08:44.214650 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:44 crc kubenswrapper[4728]: I1205 11:08:44.214667 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:44 crc kubenswrapper[4728]: I1205 11:08:44.214680 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:44Z","lastTransitionTime":"2025-12-05T11:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:44 crc kubenswrapper[4728]: I1205 11:08:44.317387 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:44 crc kubenswrapper[4728]: I1205 11:08:44.317452 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:44 crc kubenswrapper[4728]: I1205 11:08:44.317462 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:44 crc kubenswrapper[4728]: I1205 11:08:44.317478 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:44 crc kubenswrapper[4728]: I1205 11:08:44.317488 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:44Z","lastTransitionTime":"2025-12-05T11:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:44 crc kubenswrapper[4728]: I1205 11:08:44.351218 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2dq9w"
Dec 05 11:08:44 crc kubenswrapper[4728]: I1205 11:08:44.351252 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 11:08:44 crc kubenswrapper[4728]: I1205 11:08:44.351252 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 11:08:44 crc kubenswrapper[4728]: E1205 11:08:44.351371 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 11:08:44 crc kubenswrapper[4728]: E1205 11:08:44.351457 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2dq9w" podUID="99a5c711-5c13-4615-93fc-9fbf02ce54ca"
Dec 05 11:08:44 crc kubenswrapper[4728]: E1205 11:08:44.351633 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 11:08:44 crc kubenswrapper[4728]: I1205 11:08:44.420215 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:44 crc kubenswrapper[4728]: I1205 11:08:44.420259 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:44 crc kubenswrapper[4728]: I1205 11:08:44.420274 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:44 crc kubenswrapper[4728]: I1205 11:08:44.420292 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:44 crc kubenswrapper[4728]: I1205 11:08:44.420305 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:44Z","lastTransitionTime":"2025-12-05T11:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:44 crc kubenswrapper[4728]: I1205 11:08:44.522923 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:44 crc kubenswrapper[4728]: I1205 11:08:44.522956 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:44 crc kubenswrapper[4728]: I1205 11:08:44.522965 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:44 crc kubenswrapper[4728]: I1205 11:08:44.522978 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:44 crc kubenswrapper[4728]: I1205 11:08:44.522987 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:44Z","lastTransitionTime":"2025-12-05T11:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:44 crc kubenswrapper[4728]: I1205 11:08:44.624786 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:44 crc kubenswrapper[4728]: I1205 11:08:44.624852 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:44 crc kubenswrapper[4728]: I1205 11:08:44.624864 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:44 crc kubenswrapper[4728]: I1205 11:08:44.624881 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:44 crc kubenswrapper[4728]: I1205 11:08:44.624893 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:44Z","lastTransitionTime":"2025-12-05T11:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:44 crc kubenswrapper[4728]: I1205 11:08:44.728178 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:44 crc kubenswrapper[4728]: I1205 11:08:44.728233 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:44 crc kubenswrapper[4728]: I1205 11:08:44.728244 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:44 crc kubenswrapper[4728]: I1205 11:08:44.728262 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:44 crc kubenswrapper[4728]: I1205 11:08:44.728277 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:44Z","lastTransitionTime":"2025-12-05T11:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:44 crc kubenswrapper[4728]: I1205 11:08:44.830698 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:44 crc kubenswrapper[4728]: I1205 11:08:44.830752 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:44 crc kubenswrapper[4728]: I1205 11:08:44.830762 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:44 crc kubenswrapper[4728]: I1205 11:08:44.830778 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:44 crc kubenswrapper[4728]: I1205 11:08:44.830810 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:44Z","lastTransitionTime":"2025-12-05T11:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:44 crc kubenswrapper[4728]: I1205 11:08:44.933086 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:44 crc kubenswrapper[4728]: I1205 11:08:44.933130 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:44 crc kubenswrapper[4728]: I1205 11:08:44.933140 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:44 crc kubenswrapper[4728]: I1205 11:08:44.933154 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:44 crc kubenswrapper[4728]: I1205 11:08:44.933163 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:44Z","lastTransitionTime":"2025-12-05T11:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:45 crc kubenswrapper[4728]: I1205 11:08:45.035822 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:45 crc kubenswrapper[4728]: I1205 11:08:45.035926 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:45 crc kubenswrapper[4728]: I1205 11:08:45.035938 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:45 crc kubenswrapper[4728]: I1205 11:08:45.035958 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:45 crc kubenswrapper[4728]: I1205 11:08:45.035969 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:45Z","lastTransitionTime":"2025-12-05T11:08:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:45 crc kubenswrapper[4728]: I1205 11:08:45.138705 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:45 crc kubenswrapper[4728]: I1205 11:08:45.138753 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:45 crc kubenswrapper[4728]: I1205 11:08:45.138764 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:45 crc kubenswrapper[4728]: I1205 11:08:45.138782 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:45 crc kubenswrapper[4728]: I1205 11:08:45.138817 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:45Z","lastTransitionTime":"2025-12-05T11:08:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:45 crc kubenswrapper[4728]: I1205 11:08:45.241743 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:45 crc kubenswrapper[4728]: I1205 11:08:45.241820 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:45 crc kubenswrapper[4728]: I1205 11:08:45.241835 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:45 crc kubenswrapper[4728]: I1205 11:08:45.241860 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:45 crc kubenswrapper[4728]: I1205 11:08:45.241878 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:45Z","lastTransitionTime":"2025-12-05T11:08:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:45 crc kubenswrapper[4728]: I1205 11:08:45.345315 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:45 crc kubenswrapper[4728]: I1205 11:08:45.345352 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:45 crc kubenswrapper[4728]: I1205 11:08:45.345360 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:45 crc kubenswrapper[4728]: I1205 11:08:45.345375 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:45 crc kubenswrapper[4728]: I1205 11:08:45.345385 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:45Z","lastTransitionTime":"2025-12-05T11:08:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:45 crc kubenswrapper[4728]: I1205 11:08:45.351978 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 11:08:45 crc kubenswrapper[4728]: E1205 11:08:45.352563 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 11:08:45 crc kubenswrapper[4728]: I1205 11:08:45.353105 4728 scope.go:117] "RemoveContainer" containerID="2a80880d77c415df6c5a75840e5c7a8bc26c41913ea49d22fef48ecb50beda8e"
Dec 05 11:08:45 crc kubenswrapper[4728]: I1205 11:08:45.448453 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:45 crc kubenswrapper[4728]: I1205 11:08:45.448888 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:45 crc kubenswrapper[4728]: I1205 11:08:45.448910 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:45 crc kubenswrapper[4728]: I1205 11:08:45.448939 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:45 crc kubenswrapper[4728]: I1205 11:08:45.448960 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:45Z","lastTransitionTime":"2025-12-05T11:08:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:45 crc kubenswrapper[4728]: I1205 11:08:45.552294 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:45 crc kubenswrapper[4728]: I1205 11:08:45.552350 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:45 crc kubenswrapper[4728]: I1205 11:08:45.552366 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:45 crc kubenswrapper[4728]: I1205 11:08:45.552393 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:45 crc kubenswrapper[4728]: I1205 11:08:45.552409 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:45Z","lastTransitionTime":"2025-12-05T11:08:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:45 crc kubenswrapper[4728]: I1205 11:08:45.655032 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:45 crc kubenswrapper[4728]: I1205 11:08:45.655064 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:45 crc kubenswrapper[4728]: I1205 11:08:45.655071 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:45 crc kubenswrapper[4728]: I1205 11:08:45.655086 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:45 crc kubenswrapper[4728]: I1205 11:08:45.655094 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:45Z","lastTransitionTime":"2025-12-05T11:08:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:45 crc kubenswrapper[4728]: I1205 11:08:45.758146 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:45 crc kubenswrapper[4728]: I1205 11:08:45.758174 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:45 crc kubenswrapper[4728]: I1205 11:08:45.758181 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:45 crc kubenswrapper[4728]: I1205 11:08:45.758193 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:45 crc kubenswrapper[4728]: I1205 11:08:45.758202 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:45Z","lastTransitionTime":"2025-12-05T11:08:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:45 crc kubenswrapper[4728]: I1205 11:08:45.860021 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:45 crc kubenswrapper[4728]: I1205 11:08:45.860059 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:45 crc kubenswrapper[4728]: I1205 11:08:45.860067 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:45 crc kubenswrapper[4728]: I1205 11:08:45.860081 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:45 crc kubenswrapper[4728]: I1205 11:08:45.860090 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:45Z","lastTransitionTime":"2025-12-05T11:08:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:45 crc kubenswrapper[4728]: I1205 11:08:45.938658 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-wchlf_1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5/ovnkube-controller/1.log"
Dec 05 11:08:45 crc kubenswrapper[4728]: I1205 11:08:45.941457 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" event={"ID":"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5","Type":"ContainerStarted","Data":"9275e2bb49afd9d4934b460729b9596662dc3b5b4efb5574cf88a845afd863f7"}
Dec 05 11:08:45 crc kubenswrapper[4728]: I1205 11:08:45.941913 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf"
Dec 05 11:08:45 crc kubenswrapper[4728]: I1205 11:08:45.960694 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62ccfb6861d12cec3082bfe61fc1976dc0889398539797aa62979b632e74c9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82bd221438311b8ba34cfc26e43e590743b257ad91407befb8f4d2e17f7ba55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aacb2f8e3ec748166a587a5c2c7c026382073d9e3b8e20caf4dc444e9cc70c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19041ddafe3e2fe2bb95577c9bc5e372ba443790469deb4f36634d3690a61b0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d488533307095d5c0efc56c8f32bac388226d2d5763c5c31db03c84585899f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02bc0b9bc6ed08d2162af49b31f5e66ef70f78723e4016f6be5905d1227201b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9275e2bb49afd9d4934b460729b9596662dc3b5b4efb5574cf88a845afd863f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a80880d77c415df6c5a75840e5c7a8bc26c41913ea49d22fef48ecb50beda8e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T11:08:21Z\\\",\\\"message\\\":\\\"[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {43933d5e-3c3b-4ff8-8926-04ac25de450e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:43933d5e-3c3b-4ff8-8926-04ac25de450e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 11:08:20.634876 6208 obj_retry.go:365] Adding new object: *v1.Pod openshift-image-registry/node-ca-zpkw4\\\\nF1205 11:08:20.635951 6208 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:19Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9be8133cc8e8e518cd8a6a3ace93814e8d565d8bea2b81433ba07281ade2f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wchlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:45Z is after 2025-08-24T17:21:41Z"
Dec 05 11:08:45 crc kubenswrapper[4728]: I1205 11:08:45.961993 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:45 crc kubenswrapper[4728]: I1205 11:08:45.962019 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:08:45 crc kubenswrapper[4728]: I1205 11:08:45.962027 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:08:45 crc kubenswrapper[4728]: I1205 11:08:45.962041 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:08:45 crc kubenswrapper[4728]: I1205 11:08:45.962050 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:45Z","lastTransitionTime":"2025-12-05T11:08:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:08:45 crc kubenswrapper[4728]: I1205 11:08:45.973202 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zpkw4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ed53a3-7ee5-4d66-9e47-be49a9cd1b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71f9a74961573b2aafe3dce854836f51b027ee6abfb12bc2ff57be6524979165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqqjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zpkw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:45Z is after 2025-08-24T17:21:41Z"
Dec 05 11:08:45 crc kubenswrapper[4728]: I1205 11:08:45.985099 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2dq9w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"99a5c711-5c13-4615-93fc-9fbf02ce54ca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rq68g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rq68g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:20Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2dq9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:45Z is after 2025-08-24T17:21:41Z"
Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.003049 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d14e1e8-ec42-4422-b988-d92b82edfcc6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3383601ba7eb5f9957d287898dee5f9fec9cfef59b609b1e525a0e0564d7e86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c4913667ba97a55961e836efe1c65c28881ae9f356084d3c712342aa85b9301\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f74f65892b7a9db35105f91763d8b150e62b86dae595e7d6208d47355d51bed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20c7713b547195d59fc70a00f6f6de511936ce6f38c6a4bcf89db4880ceebae6\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20c7713b547195d59fc70a00f6f6de511936ce6f38c6a4bcf89db4880ceebae6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:45Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.019007 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-85f5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f536e7a4-ad53-442e-b7c3-8928fcd89f22\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://180e166e57042a6843516cf424020e8aab91131f7e0cc4c6f061de5e46bb0f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r72s7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod 
\"openshift-dns\"/\"node-resolver-85f5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:46Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.034754 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:46Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.046994 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec1c7fc1e4ad2be5a7dfb1eeaa3def2a68d041e302643e875869f6c657c5f6d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:46Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.066469 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"600a7698-d6af-4d3a-997c-2af7492b676c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4e303bada68d0a42afcecbe481dd6199dd2e545598d65614cee8ce1097fa3da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e0e4eacda19e748d0d3f8e1d76b38c186c2df0f81a171387de0d9016a76c719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c6351e5a4a9441b43ba3145aa818599799ad7fcdd6bf62b5d0ec08eb3a9d5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c60e02416d
658dfb8deecf915952febb04b650126edae08c8bf522c50524c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://489eff9110c6e48b0e3119edf09adf0a2a48fc1fdc1bb8e81f113ab882725eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7
bb177af7bb318dcaae03b164\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:46Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.069107 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.069131 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.069142 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.069156 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.069165 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:46Z","lastTransitionTime":"2025-12-05T11:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.081399 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90beec388d3137d02789bb6be800fbb0b1678f8799f133d1e48b1f636f70e8c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://780e71361f1592490b1bbc77b9a869204d3a20319d1bd6c8330d3b0fc81d197f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:46Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.099461 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:46Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.112861 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:46Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.128215 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8pwbb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e18c7d32-4ecb-4931-931e-56a7898cb233\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d53da0e296e545d13941882c9568c12f967a274b906dafb14c0905f558a155c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\
\\":[{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:10Z
\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f79805a42a1054302447bc55dc07df80cf19e651cd1c3e783a4614ff49f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f79805a42a1054302447bc55dc07df80cf19e651cd1c3e783a4614ff49f9f624\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b953abf0e1905c143f473ea9d09341d7602168ff1350c317346bef8dda9402f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b953abf0e1905c143f473ea9d09341d7602168ff1350c317346bef8dda9402f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7627aff65d1bc4a39148f098ac2e1676d41c980ad3dd2d9494f4bee7b4c0872\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\"
,\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7627aff65d1bc4a39148f098ac2e1676d41c980ad3dd2d9494f4bee7b4c0872\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8pwbb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:46Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.144805 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-n2qgk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f26e1600-9bcd-497a-b875-7eaed5b6fba8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b47033c6c217ce37f789d031e18e21eeb86ba85f574c6455e15c79a25bfeaecc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fqbq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6b426a3d72e8498cd10b4085d89ffa51beaa52d2dbda7c70c96769e32a20d27\\\",\\\"image\\\":\\\"quay.io/op
enshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fqbq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-n2qgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:46Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.158875 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4abe5ae7-e1e1-4771-bb01-4e62b10e074b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe411cb87a46bdaf8208ebd4162ff18f2f9a2617ca43eb635793364573eb3ea4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ca6321022609b3cd60a9e1a534cfd3e69d3f520e5252ed4ffa8f76be4af91e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/
ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ccc55c28cdfadaa191738047a8d7223cf102d79a9af4d225562a87c42cf1c8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1665078c543b335af45e2ef37840dff719d513125a747f8c07dc74ef97e07354\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:46Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.171536 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.171585 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.171598 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.171615 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.171627 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:46Z","lastTransitionTime":"2025-12-05T11:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.174809 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6b1dbb0-8a99-4b3b-870e-771cdaac1bac\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://adb81cae56872a55c5781abe457653330ce2284251d64366ece5f2f05055b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ab41915fb3de0296bc2ba49fe39835620c1ed216790add23ccc2c23ac718c2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://920da764fdc49312fdf906f31a1280028d5762b4b7af2b3806c9488e3dfa9d71\\\",\\\"image\\\":\\\
"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd145f7ac78fc008f9b9b24a11f51d6afc11e3cfa047b8d178d11507ddde8e77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b05c55e24f53fc91a2fd2b2a34b000751799933c3cb56da1ea666a6bfb6ba13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T11:08:05Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 11:07:59.827917 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 11:07:59.829563 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2883091425/tls.crt::/tmp/serving-cert-2883091425/tls.key\\\\\\\"\\\\nI1205 11:08:05.319025 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 11:08:05.321746 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 11:08:05.321774 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 11:08:05.321847 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 11:08:05.321860 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 11:08:05.327281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 11:08:05.327310 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 11:08:05.327326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 11:08:05.327329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 11:08:05.327332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 11:08:05.327493 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 11:08:05.328771 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af8363ba45c19a07e7d11064c2e321ec28b939288d01eb82d8bd9e1ee6b93998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:46Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.187483 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://098ed82d50e963f31d1176e06249004a4cc94258aab5dabf6628dead209ae3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:46Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.202446 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gf8np" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f292da29-a632-47aa-8bcc-2d999eaa6c11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d525f104a98170107ecc1ae96c4f5c5ff3dda2f976336c219e5a9498725380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dxmch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gf8np\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:46Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.213916 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bfa60b-fcb6-4519-abc5-c25fea50921d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d952894ef1ae2b5d6c23b0d1b85d9d2689e062674a2e4ba53719b6c7290bbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41cccfccd3b8763566f9a4453832461a74aedcf7bcc18e8c925bb20e1b620de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-w8qlp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:46Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.274306 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.274339 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.274348 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.274361 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.274371 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:46Z","lastTransitionTime":"2025-12-05T11:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.351673 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.351673 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:08:46 crc kubenswrapper[4728]: E1205 11:08:46.351882 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.351943 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2dq9w" Dec 05 11:08:46 crc kubenswrapper[4728]: E1205 11:08:46.352063 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 11:08:46 crc kubenswrapper[4728]: E1205 11:08:46.352126 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-2dq9w" podUID="99a5c711-5c13-4615-93fc-9fbf02ce54ca" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.366634 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-85f5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f536e7a4-ad53-442e-b7c3-8928fcd89f22\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://180e166e57042a6843516cf424020e8aab91131f7e0cc4c6f061de5e46bb0f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r72s7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-85f5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:46Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.378220 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.378269 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.378281 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.378300 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.378314 4728 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:46Z","lastTransitionTime":"2025-12-05T11:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.383121 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:46Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.402494 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec1c7fc1e4ad2be5a7dfb1eeaa3def2a68d041e302643e875869f6c657c5f6d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:46Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.428011 4728 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62ccfb6861d12cec3082bfe61fc1976dc0889398539797aa62979b632e74c9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82bd221438311b8ba34cfc26e43e590743b257ad91407befb8f4d2e17f7ba55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aacb2f8e3ec748166a587a5c2c7c026382073d9e3b8e20caf4dc444e9cc70c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36c
dd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19041ddafe3e2fe2bb95577c9bc5e372ba443790469deb4f36634d3690a61b0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d488533307095d5c0efc56c8f32bac388226d2d5763c5c31db03c84585899f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02bc0b9bc6ed08d2162af49b31f5e66ef70f78723e4016f6be5905d1227201b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-con
troller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9275e2bb49afd9d4934b460729b9596662dc3b5b4efb5574cf88a845afd863f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a80880d77c415df6c5a75840e5c7a8bc26c41913ea49d22fef48ecb50beda8e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T11:08:21Z\\\",\\\"message\\\":\\\"[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {43933d5e-3c3b-4ff8-8926-04ac25de450e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:43933d5e-3c3b-4ff8-8926-04ac25de450e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 11:08:20.634876 6208 obj_retry.go:365] Adding new object: *v1.Pod openshift-image-registry/node-ca-zpkw4\\\\nF1205 11:08:20.635951 6208 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or 
is\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:19Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9be8133cc8e8e518cd8a6a3ace93814e8d565d8bea2b81433ba07281ade2f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wchlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:46Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.442047 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zpkw4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ed53a3-7ee5-4d66-9e47-be49a9cd1b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71f9a74961573b2aafe3dce854836f51b027ee6abfb12bc2ff57be6524979165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqqjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zpkw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:46Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.454990 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2dq9w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"99a5c711-5c13-4615-93fc-9fbf02ce54ca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rq68g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rq68g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:20Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2dq9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:46Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.476154 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d14e1e8-ec42-4422-b988-d92b82edfcc6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3383601ba7eb5f9957d287898dee5f9fec9cfef59b609b1e525a0e0564d7e86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c4913667ba97a55961e836efe1c65c28881ae9f356084d3c712342aa85b9301\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f74f65892b7a9db35105f91763d8b150e62b86dae595e7d6208d47355d51bed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20c7713b547195d59fc70a00f6f6de511936ce6f38c6a4bcf89db4880ceebae6\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20c7713b547195d59fc70a00f6f6de511936ce6f38c6a4bcf89db4880ceebae6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:46Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.480557 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.480595 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.480610 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.480656 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.480671 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:46Z","lastTransitionTime":"2025-12-05T11:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.500078 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:46Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.518617 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:46Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.549417 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"600a7698-d6af-4d3a-997c-2af7492b676c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4e303bada68d0a42afcecbe481dd6199dd2e545598d65614cee8ce1097fa3da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e0e4eacda19e748d0d3f8e1d76b38c186c2df0f81a171387de0d
9016a76c719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c6351e5a4a9441b43ba3145aa818599799ad7fcdd6bf62b5d0ec08eb3a9d5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c60e02416d658dfb8deecf915952febb04b650126edae08c8bf522c50524c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://489eff9110c6e48b0e3119edf09adf0a2a48fc1fdc1bb8e81f113ab882725eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev
/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:46Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.564314 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90beec388d3137d02789bb6be800fbb0b1678f8799f133d1e48b1f636f70e8c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://780e71361f1592490b1bbc77b9a869204d3a20319d1bd6c8330d3b0fc81d197f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:46Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.578441 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://098ed82d50e963f31d1176e06249004a4cc94258aab5dabf6628dead209ae3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:46Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.582590 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.582631 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.582642 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.582660 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.582671 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:46Z","lastTransitionTime":"2025-12-05T11:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.591561 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gf8np" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f292da29-a632-47aa-8bcc-2d999eaa6c11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d525f104a98170107ecc1ae96c4f5c5ff3dda2f976336c219e5a9498725380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dxmch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gf8np\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:46Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.603592 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bfa60b-fcb6-4519-abc5-c25fea50921d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d952894ef1ae2b5d6c23b0d1b85d9d2689e062674a2e4ba53719b6c7290bbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41cccfccd3b8763566f9a4453832461a74aedcf7bcc18e8c925bb20e1b620de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w8qlp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:46Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.617967 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8pwbb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e18c7d32-4ecb-4931-931e-56a7898cb233\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d53da0e296e545d13941882c9568c12f967a274b906dafb14c0905f558a155c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/e
ntrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f79805a42a1054302447bc55dc07df80cf19e651cd1c3e783a4614ff49f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f
2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f79805a42a1054302447bc55dc07df80cf19e651cd1c3e783a4614ff49f9f624\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b953abf0e1905c143f473ea9d09341d7602168ff1350c317346bef8dda9402f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b953abf0e1905c143f473ea9d09341d7602168ff1350c317346bef8dda9402f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7627aff65d1bc4a39148f098ac2e1676d41c980ad3dd2d9494f4bee7b4c0872\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7627aff65d1bc4a39148f098ac2e1676d41c980ad3dd2d9494f4bee7b4c0872\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\
\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8pwbb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:46Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.630838 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-n2qgk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f26e1600-9bcd-497a-b875-7eaed5b6fba8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b47033c6c217ce37f789d031e18e21eeb86ba85f574c6455e15c79a25bfeaecc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fqbq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6b426a3d72e8498cd10b4085d89ffa51beaa52d2dbda7c70c96769e32a20d27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fqbq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.12
6.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-n2qgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:46Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.644521 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4abe5ae7-e1e1-4771-bb01-4e62b10e074b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe411cb87a46bdaf8208ebd4162ff18f2f9a2617ca43eb635793364573eb3ea4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ca6321022609b3cd60a9e1a534cfd3e69d3f520e5252ed4ffa8f76be4af91e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ccc55c28cdfadaa191738047a8d7223cf102d79a9af4d225562a87c42cf1c8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha25
6:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1665078c543b335af45e2ef37840dff719d513125a747f8c07dc74ef97e07354\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:46Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.660420 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6b1dbb0-8a99-4b3b-870e-771cdaac1bac\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://adb81cae56872a55c5781abe457653330ce2284251d64366ece5f2f05055b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ab41915fb3de0296bc2ba49fe39835620c1ed216790add23ccc2c23ac718c2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://920da764fdc49312fdf906f31a1280028d5762b4b7af2b3806c9488e3dfa9d71\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd145f7ac78fc008f9b9b24a11f51d6afc11e3cfa047b8d178d11507ddde8e77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b05c55e24f53fc91a2fd2b2a34b000751799933c3cb56da1ea666a6bfb6ba13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T11:08:05Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 11:07:59.827917 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 11:07:59.829563 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2883091425/tls.crt::/tmp/serving-cert-2883091425/tls.key\\\\\\\"\\\\nI1205 11:08:05.319025 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 11:08:05.321746 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 11:08:05.321774 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 11:08:05.321847 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 11:08:05.321860 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 11:08:05.327281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 11:08:05.327310 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 11:08:05.327326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 11:08:05.327329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 11:08:05.327332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 11:08:05.327493 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 11:08:05.328771 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af8363ba45c19a07e7d11064c2e321ec28b939288d01eb82d8bd9e1ee6b93998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:46Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.685366 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.685614 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.685742 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.685869 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.685963 4728 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:46Z","lastTransitionTime":"2025-12-05T11:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.788582 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.788642 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.788657 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.788678 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.788693 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:46Z","lastTransitionTime":"2025-12-05T11:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.891621 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.891675 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.891690 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.891710 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.891722 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:46Z","lastTransitionTime":"2025-12-05T11:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.946325 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-wchlf_1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5/ovnkube-controller/2.log" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.946942 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-wchlf_1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5/ovnkube-controller/1.log" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.950135 4728 generic.go:334] "Generic (PLEG): container finished" podID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" containerID="9275e2bb49afd9d4934b460729b9596662dc3b5b4efb5574cf88a845afd863f7" exitCode=1 Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.950184 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" event={"ID":"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5","Type":"ContainerDied","Data":"9275e2bb49afd9d4934b460729b9596662dc3b5b4efb5574cf88a845afd863f7"} Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.950274 4728 scope.go:117] "RemoveContainer" containerID="2a80880d77c415df6c5a75840e5c7a8bc26c41913ea49d22fef48ecb50beda8e" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.950884 4728 scope.go:117] "RemoveContainer" containerID="9275e2bb49afd9d4934b460729b9596662dc3b5b4efb5574cf88a845afd863f7" Dec 05 11:08:46 crc kubenswrapper[4728]: E1205 11:08:46.951060 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-wchlf_openshift-ovn-kubernetes(1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5)\"" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" podUID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.975771 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"600a7698-d6af-4d3a-997c-2af7492b676c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4e303bada68d0a42afcecbe481dd6199dd2e545598d65614cee8ce1097fa3da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e0e4eacda19e748d0d3f8e1d76b38c186c2df0f81a171387de0d9016a76c719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c6351e5a4a9441b43ba3145aa818599799ad7fcdd6bf62b5d0ec08eb3a9d5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c60e02416d658dfb8deecf915952febb04b650
126edae08c8bf522c50524c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://489eff9110c6e48b0e3119edf09adf0a2a48fc1fdc1bb8e81f113ab882725eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:46Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.993712 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.993757 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.993767 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.993783 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.993815 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:46Z","lastTransitionTime":"2025-12-05T11:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:46 crc kubenswrapper[4728]: I1205 11:08:46.994197 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90beec388d3137d02789bb6be800fbb0b1678f8799f133d1e48b1f636f70e8c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://780e71361f1592490b1bbc77b9a869204d3a20319d1bd6c8330d3b0fc81d197f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:46Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.011542 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:47Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.025638 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.025638 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:47Z is after 2025-08-24T17:21:41Z"
\\":[{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:10Z
\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f79805a42a1054302447bc55dc07df80cf19e651cd1c3e783a4614ff49f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f79805a42a1054302447bc55dc07df80cf19e651cd1c3e783a4614ff49f9f624\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b953abf0e1905c143f473ea9d09341d7602168ff1350c317346bef8dda9402f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b953abf0e1905c143f473ea9d09341d7602168ff1350c317346bef8dda9402f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7627aff65d1bc4a39148f098ac2e1676d41c980ad3dd2d9494f4bee7b4c0872\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\"
,\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7627aff65d1bc4a39148f098ac2e1676d41c980ad3dd2d9494f4bee7b4c0872\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8pwbb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:47Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.065060 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-n2qgk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f26e1600-9bcd-497a-b875-7eaed5b6fba8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b47033c6c217ce37f789d031e18e21eeb86ba85f574c6455e15c79a25bfeaecc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fqbq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6b426a3d72e8498cd10b4085d89ffa51beaa52d2dbda7c70c96769e32a20d27\\\",\\\"image\\\":\\\"quay.io/op
Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.065060 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-n2qgk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f26e1600-9bcd-497a-b875-7eaed5b6fba8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b47033c6c217ce37f789d031e18e21eeb86ba85f574c6455e15c79a25bfeaecc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fqbq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6b426a3d72e8498cd10b4085d89ffa51beaa52d2dbda7c70c96769e32a20d27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fqbq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-n2qgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:47Z is after 2025-08-24T17:21:41Z"
Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.082666 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4abe5ae7-e1e1-4771-bb01-4e62b10e074b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe411cb87a46bdaf8208ebd4162ff18f2f9a2617ca43eb635793364573eb3ea4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ca6321022609b3cd60a9e1a534cfd3e69d3f520e5252ed4ffa8f76be4af91e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ccc55c28cdfadaa191738047a8d7223cf102d79a9af4d225562a87c42cf1c8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1665078c543b335af45e2ef37840dff719d513125a747f8c07dc74ef97e07354\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:47Z is after 2025-08-24T17:21:41Z"
Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.096864 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.096968 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
event="NodeHasSufficientPID" Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.097030 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.097048 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:47Z","lastTransitionTime":"2025-12-05T11:08:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.097890 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6b1dbb0-8a99-4b3b-870e-771cdaac1bac\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://adb81cae56872a55c5781abe457653330ce2284251d64366ece5f2f05055b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ab41915fb3de0296bc2ba49fe39835620c1ed216790add23ccc2c23ac718c2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://920da764fdc49312fdf906f31a1280028d5762b4b7af2b3806c9488e3dfa9d71\\\",\\\"image\\\":\\\
"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd145f7ac78fc008f9b9b24a11f51d6afc11e3cfa047b8d178d11507ddde8e77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b05c55e24f53fc91a2fd2b2a34b000751799933c3cb56da1ea666a6bfb6ba13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T11:08:05Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 11:07:59.827917 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 11:07:59.829563 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2883091425/tls.crt::/tmp/serving-cert-2883091425/tls.key\\\\\\\"\\\\nI1205 11:08:05.319025 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 11:08:05.321746 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 11:08:05.321774 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 11:08:05.321847 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 11:08:05.321860 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 11:08:05.327281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 11:08:05.327310 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 11:08:05.327326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 11:08:05.327329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 11:08:05.327332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 11:08:05.327493 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 11:08:05.328771 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af8363ba45c19a07e7d11064c2e321ec28b939288d01eb82d8bd9e1ee6b93998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:47Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.114511 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://098ed82d50e963f31d1176e06249004a4cc94258aab5dabf6628dead209ae3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:47Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.130039 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gf8np" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f292da29-a632-47aa-8bcc-2d999eaa6c11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d525f104a98170107ecc1ae96c4f5c5ff3dda2f976336c219e5a9498725380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dxmch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gf8np\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:47Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.144489 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bfa60b-fcb6-4519-abc5-c25fea50921d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d952894ef1ae2b5d6c23b0d1b85d9d2689e062674a2e4ba53719b6c7290bbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41cccfccd3b8763566f9a4453832461a74aedcf7bcc18e8c925bb20e1b620de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-w8qlp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:47Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.165494 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62ccfb6861d12cec3082bfe61fc1976dc0889398539797aa62979b632e74c9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82bd221438311b8ba34cfc26e43e590743b257ad91407befb8f4d2e17f7ba55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\
\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aacb2f8e3ec748166a587a5c2c7c026382073d9e3b8e20caf4dc444e9cc70c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19041ddafe3e2fe2bb95577c9bc5e372ba443790469deb4f36634d3690a61b0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d488533307095d5c0efc56c8f32bac388226d2d5763c5c31db03c84585899f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-acce
ss-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02bc0b9bc6ed08d2162af49b31f5e66ef70f78723e4016f6be5905d1227201b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9275e2bb49afd9d4934b460729b9596662dc3b5b4efb5574cf88a845afd863f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a80880d77c415df6c5a75840e5c7a8bc26c41913ea49d22fef48ecb50beda8e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T11:08:21Z\\\",\\\"message\\\":\\\"[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {43933d5e-3c3b-4ff8-8926-04ac25de450e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:43933d5e-3c3b-4ff8-8926-04ac25de450e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 11:08:20.634876 6208 obj_retry.go:365] Adding new object: *v1.Pod openshift-image-registry/node-ca-zpkw4\\\\nF1205 11:08:20.635951 6208 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call 
webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:19Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9275e2bb49afd9d4934b460729b9596662dc3b5b4efb5574cf88a845afd863f7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T11:08:46Z\\\",\\\"message\\\":\\\"-manager/olm-operator-metrics]} name:Service_openshift-operator-lifecycle-manager/olm-operator-metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.168:8443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {63b1440a-0908-4cab-8799-012fa1cf0b07}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 11:08:46.188271 6432 ovnkube_controller.go:1292] Config duration recorder: kind/namespace/name service/openshift-cluster-machine-approver/machine-approver. OVN-Kubernetes controller took 0.13009727 seconds. No OVN measurement.\\\\nI1205 11:08:46.188282 6432 ovnkube_controller.go:1292] Config duration recorder: kind/namespace/name service/openshift-image-registry/image-registry-operator. OVN-Kubernetes controller took 0.129989518 seconds. No OVN measurement.\\\\nI1205 11:08:46.188337 6432 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1205 11:08:46.188423 6432 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1205 11:08:46.188488 6432 ovnkube.go:599] Stopped ovnkube\\\\nI1205 11:08:46.188516 6432 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1205 11:08:46.188575 6432 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9be8133cc8e8e518cd8a6a3ace93814e8d565d8bea2b81433ba07281ade2f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd4
7ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wchlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:47Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.179232 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zpkw4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ed53a3-7ee5-4d66-9e47-be49a9cd1b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71f9a74961573b2aafe3dce854836f51b027ee6abfb12bc2ff57be6524979165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqqjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"1
92.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zpkw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:47Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.192407 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2dq9w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"99a5c711-5c13-4615-93fc-9fbf02ce54ca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rq68g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rq68g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:20Z\\\"}}\" for pod 
\"openshift-multus\"/\"network-metrics-daemon-2dq9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:47Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.199424 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.199484 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.199499 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.199519 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.199530 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:47Z","lastTransitionTime":"2025-12-05T11:08:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.206874 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d14e1e8-ec42-4422-b988-d92b82edfcc6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3383601ba7eb5f9957d287898dee5f9fec9cfef59b609b1e525a0e0564d7e86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c4913667ba97a55961e836efe1c65c28881ae9f356084d3c712342aa85b9301\\\
",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f74f65892b7a9db35105f91763d8b150e62b86dae595e7d6208d47355d51bed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20c7713b547195d59fc70a00f6f6de511936ce6f38c6a4bcf89db4880ceebae6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20c7713b547195d59fc70a00f6f6de511936ce6f38c6a4bcf89db4880ceebae6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:47Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.219351 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-85f5z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f536e7a4-ad53-442e-b7c3-8928fcd89f22\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://180e166e57042a6843516cf424020e8aab91131f7e0cc4c6f061de5e46bb0f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r72s7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-85f5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:47Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.235936 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:47Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.253478 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec1c7fc1e4ad2be5a7dfb1eeaa3def2a68d041e302643e875869f6c657c5f6d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:47Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.305838 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.305890 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.305903 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.305926 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.305943 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:47Z","lastTransitionTime":"2025-12-05T11:08:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.351860 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:08:47 crc kubenswrapper[4728]: E1205 11:08:47.351997 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.409205 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.409234 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.409422 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.409435 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.409446 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:47Z","lastTransitionTime":"2025-12-05T11:08:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.512326 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.512377 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.512390 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.512406 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.512419 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:47Z","lastTransitionTime":"2025-12-05T11:08:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.615315 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.615360 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.615372 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.615389 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.615400 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:47Z","lastTransitionTime":"2025-12-05T11:08:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.718446 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.718546 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.718574 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.718609 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.718649 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:47Z","lastTransitionTime":"2025-12-05T11:08:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.821419 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.821472 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.821485 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.821503 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.821516 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:47Z","lastTransitionTime":"2025-12-05T11:08:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.923775 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.924032 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.924100 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.924166 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.924222 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:47Z","lastTransitionTime":"2025-12-05T11:08:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.954962 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-wchlf_1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5/ovnkube-controller/2.log" Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.958313 4728 scope.go:117] "RemoveContainer" containerID="9275e2bb49afd9d4934b460729b9596662dc3b5b4efb5574cf88a845afd863f7" Dec 05 11:08:47 crc kubenswrapper[4728]: E1205 11:08:47.958542 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-wchlf_openshift-ovn-kubernetes(1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5)\"" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" podUID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.972156 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4abe5ae7-e1e1-4771-bb01-4e62b10e074b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe411cb87a46bdaf8208ebd4162ff18f2f9a2617ca43eb635793364573eb3ea4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ca6321022609b3cd60a9e1a534cfd3e69d3f520e5252ed4ffa8f76be4af91e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":
\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ccc55c28cdfadaa191738047a8d7223cf102d79a9af4d225562a87c42cf1c8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1665078c543b335af45e2ef37840dff719d513125a747f8c07dc74ef97e07354\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:47Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:47 crc kubenswrapper[4728]: I1205 11:08:47.988458 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6b1dbb0-8a99-4b3b-870e-771cdaac1bac\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://adb81cae56872a55c5781abe457653330ce2284251d64366ece5f2f05055b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ab41915fb3de0296bc2ba49fe39835620c1ed216790add23ccc2c23ac718c2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://920da764fdc49312fdf906f31a1280028d5762b4b7af2b3806c9488e3dfa9d71\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd145f7ac78fc008f9b9b24a11f51d6afc11e3cfa047b8d178d11507ddde8e77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b05c55e24f53fc91a2fd2b2a34b000751799933c3cb56da1ea666a6bfb6ba13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T11:08:05Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 11:07:59.827917 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 11:07:59.829563 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2883091425/tls.crt::/tmp/serving-cert-2883091425/tls.key\\\\\\\"\\\\nI1205 11:08:05.319025 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 11:08:05.321746 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 11:08:05.321774 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 11:08:05.321847 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 11:08:05.321860 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 11:08:05.327281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 11:08:05.327310 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 11:08:05.327326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 11:08:05.327329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 11:08:05.327332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 11:08:05.327493 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 11:08:05.328771 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af8363ba45c19a07e7d11064c2e321ec28b939288d01eb82d8bd9e1ee6b93998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:47Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.000224 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://098ed82d50e963f31d1176e06249004a4cc94258aab5dabf6628dead209ae3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:47Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.018706 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gf8np" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f292da29-a632-47aa-8bcc-2d999eaa6c11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d525f104a98170107ecc1ae96c4f5c5ff3dda2f976336c219e5a9498725380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dxmch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gf8np\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:48Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.026347 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.026377 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.026387 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.026404 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.026417 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:48Z","lastTransitionTime":"2025-12-05T11:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.032893 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bfa60b-fcb6-4519-abc5-c25fea50921d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d952894ef1ae2b5d6c23b0d1b85d9d2689e062674a2e4ba53719b6c7290bbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41cccfccd3b8763566f9a4453832461a74aedcf7bcc18e8c925bb20e1b620de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w8qlp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:48Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.051254 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8pwbb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e18c7d32-4ecb-4931-931e-56a7898cb233\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d53da0e296e545d13941882c9568c12f967a274b906dafb14c0905f558a155c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f79805a42a1054302447bc55dc07df80cf19e651cd1c3e783a4614ff49f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f79805a42a1054302447bc55dc07df80cf19e651cd1c3e783a4614ff49f9f624\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b953abf0e1905c143f473ea9d09341d7602168ff1350c317346bef8dda9402f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b953abf0e1905c143f473ea9d09341d7602168ff1350c317346bef8dda9402f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7627aff65d1bc4a39148f098ac2e1676d41c980ad3dd2d9494f4bee7b4c0872\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7627aff65d1bc4a39148f098ac2e1676d41c980ad3dd2d9494f4bee7b4c0872\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8pwbb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:48Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.064327 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-n2qgk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f26e1600-9bcd-497a-b875-7eaed5b6fba8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b47033c6c217ce37f789d031e18e21eeb86ba85f574c6455e15c79a25bfeaecc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fqbq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6b426a3d72e8498cd10b4085d89ffa51beaa52d2dbda7c70c96769e32a20d27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fqbq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-n2qgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:48Z is after 2025-08-24T17:21:41Z" Dec 05 
11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.079183 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d14e1e8-ec42-4422-b988-d92b82edfcc6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3383601ba7eb5f9957d287898dee5f9fec9cfef59b609b1e525a0e0564d7e86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c4913667ba97a55961e836efe1c65c28881ae9f356084d3c712342aa85b9301\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f74f65892b7a9db35105f91763d8b150e62b86dae595e7d6208d47355d51bed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.
126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20c7713b547195d59fc70a00f6f6de511936ce6f38c6a4bcf89db4880ceebae6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20c7713b547195d59fc70a00f6f6de511936ce6f38c6a4bcf89db4880ceebae6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:48Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.092444 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-85f5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f536e7a4-ad53-442e-b7c3-8928fcd89f22\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://180e166e57042a6843516cf424020e8aab91131f7e0cc4c6f061de5e46bb0f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r72s7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.16
8.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-85f5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:48Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.110378 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:48Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.128060 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec1c7fc1e4ad2be5a7dfb1eeaa3def2a68d041e302643e875869f6c657c5f6d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:48Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.131156 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.131280 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.131369 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.131455 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.131539 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:48Z","lastTransitionTime":"2025-12-05T11:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.158739 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62ccfb6861d12cec3082bfe61fc1976dc0889398539797aa62979b632e74c9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82bd221438311b8ba34cfc26e43e590743b257ad91407befb8f4d2e17f7ba55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aacb2f8e3ec748166a587a5c2c7c026382073d9e3b8e20caf4dc444e9cc70c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19041ddafe3e2fe2bb95577c9bc5e372ba443790469deb4f36634d3690a61b0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d488533307095d5c0efc56c8f32bac388226d2d5763c5c31db03c84585899f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02bc0b9bc6ed08d2162af49b31f5e66ef70f78723e4016f6be5905d1227201b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9275e2bb49afd9d4934b460729b9596662dc3b5b
4efb5574cf88a845afd863f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9275e2bb49afd9d4934b460729b9596662dc3b5b4efb5574cf88a845afd863f7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T11:08:46Z\\\",\\\"message\\\":\\\"-manager/olm-operator-metrics]} name:Service_openshift-operator-lifecycle-manager/olm-operator-metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.168:8443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {63b1440a-0908-4cab-8799-012fa1cf0b07}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 11:08:46.188271 6432 ovnkube_controller.go:1292] Config duration recorder: kind/namespace/name service/openshift-cluster-machine-approver/machine-approver. OVN-Kubernetes controller took 0.13009727 seconds. No OVN measurement.\\\\nI1205 11:08:46.188282 6432 ovnkube_controller.go:1292] Config duration recorder: kind/namespace/name service/openshift-image-registry/image-registry-operator. OVN-Kubernetes controller took 0.129989518 seconds. No OVN measurement.\\\\nI1205 11:08:46.188337 6432 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1205 11:08:46.188423 6432 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1205 11:08:46.188488 6432 ovnkube.go:599] Stopped ovnkube\\\\nI1205 11:08:46.188516 6432 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1205 11:08:46.188575 6432 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:45Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-wchlf_openshift-ovn-kubernetes(1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9be8133cc8e8e518cd8a6a3ace93814e8d565d8bea2b81433ba07281ade2f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wchlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:48Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.176170 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zpkw4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ed53a3-7ee5-4d66-9e47-be49a9cd1b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71f9a74961573b2aafe3dce854836f51b027ee6abfb12bc2ff57be6524979165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqqjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zpkw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:48Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.190942 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2dq9w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"99a5c711-5c13-4615-93fc-9fbf02ce54ca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rq68g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rq68g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:20Z\\\"}}\" for pod 
\"openshift-multus\"/\"network-metrics-daemon-2dq9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:48Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.213680 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"600a7698-d6af-4d3a-997c-2af7492b676c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4e303bada68d0a42afcecbe481dd6199dd2e545598d65614cee8ce1097fa3da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e0e4eacda19e748d0d3f8e1d76b38c186c2df0f81a171387de0d9016a76c719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c6351e5a4a9441b43ba3145aa818599799ad7fcdd6bf62b5d0ec08eb3a9d5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c60e02416d658dfb8deecf915952febb04b650126edae08c8bf522c50524c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://489eff9110c6e48b0e3119edf09adf0a2a48fc1fdc1bb8e81f113ab882725eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e
9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:48Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.230015 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90beec388d3137d02789bb6be800fbb0b1678f8799f133d1e48b1f636f70e8c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://780e71361f1592490b1bbc77b9a869204d3a20319d1bd6c8330d3b0fc81d197f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:48Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.235237 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.236151 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.236173 4728 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.236192 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.236431 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:48Z","lastTransitionTime":"2025-12-05T11:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.246858 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:48Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.266066 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:48Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.341544 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.341708 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.341721 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.341754 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.341767 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:48Z","lastTransitionTime":"2025-12-05T11:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.351321 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.351358 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:08:48 crc kubenswrapper[4728]: E1205 11:08:48.354678 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 11:08:48 crc kubenswrapper[4728]: E1205 11:08:48.354510 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.351458 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2dq9w" Dec 05 11:08:48 crc kubenswrapper[4728]: E1205 11:08:48.354813 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2dq9w" podUID="99a5c711-5c13-4615-93fc-9fbf02ce54ca" Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.444145 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.444213 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.444237 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.444273 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.444295 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:48Z","lastTransitionTime":"2025-12-05T11:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.546927 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.546958 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.546965 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.546977 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.546986 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:48Z","lastTransitionTime":"2025-12-05T11:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.650074 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.650134 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.650153 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.650180 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.650198 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:48Z","lastTransitionTime":"2025-12-05T11:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.753313 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.753354 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.753365 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.753381 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.753392 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:48Z","lastTransitionTime":"2025-12-05T11:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.856560 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.856616 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.856633 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.856655 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.856672 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:48Z","lastTransitionTime":"2025-12-05T11:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.959690 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.959735 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.959747 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.959763 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:48 crc kubenswrapper[4728]: I1205 11:08:48.959776 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:48Z","lastTransitionTime":"2025-12-05T11:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:49 crc kubenswrapper[4728]: I1205 11:08:49.062383 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:49 crc kubenswrapper[4728]: I1205 11:08:49.062433 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:49 crc kubenswrapper[4728]: I1205 11:08:49.062448 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:49 crc kubenswrapper[4728]: I1205 11:08:49.062466 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:49 crc kubenswrapper[4728]: I1205 11:08:49.062479 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:49Z","lastTransitionTime":"2025-12-05T11:08:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:49 crc kubenswrapper[4728]: I1205 11:08:49.165534 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:49 crc kubenswrapper[4728]: I1205 11:08:49.165585 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:49 crc kubenswrapper[4728]: I1205 11:08:49.165601 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:49 crc kubenswrapper[4728]: I1205 11:08:49.165624 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:49 crc kubenswrapper[4728]: I1205 11:08:49.165639 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:49Z","lastTransitionTime":"2025-12-05T11:08:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:49 crc kubenswrapper[4728]: I1205 11:08:49.269257 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:49 crc kubenswrapper[4728]: I1205 11:08:49.269296 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:49 crc kubenswrapper[4728]: I1205 11:08:49.269304 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:49 crc kubenswrapper[4728]: I1205 11:08:49.269319 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:49 crc kubenswrapper[4728]: I1205 11:08:49.269327 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:49Z","lastTransitionTime":"2025-12-05T11:08:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:49 crc kubenswrapper[4728]: I1205 11:08:49.352041 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:08:49 crc kubenswrapper[4728]: E1205 11:08:49.352275 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 11:08:49 crc kubenswrapper[4728]: I1205 11:08:49.372975 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:49 crc kubenswrapper[4728]: I1205 11:08:49.373036 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:49 crc kubenswrapper[4728]: I1205 11:08:49.373051 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:49 crc kubenswrapper[4728]: I1205 11:08:49.373078 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:49 crc kubenswrapper[4728]: I1205 11:08:49.373094 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:49Z","lastTransitionTime":"2025-12-05T11:08:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:49 crc kubenswrapper[4728]: I1205 11:08:49.476446 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:49 crc kubenswrapper[4728]: I1205 11:08:49.476491 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:49 crc kubenswrapper[4728]: I1205 11:08:49.476505 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:49 crc kubenswrapper[4728]: I1205 11:08:49.476523 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:49 crc kubenswrapper[4728]: I1205 11:08:49.476535 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:49Z","lastTransitionTime":"2025-12-05T11:08:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:49 crc kubenswrapper[4728]: I1205 11:08:49.578869 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:49 crc kubenswrapper[4728]: I1205 11:08:49.578914 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:49 crc kubenswrapper[4728]: I1205 11:08:49.578926 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:49 crc kubenswrapper[4728]: I1205 11:08:49.578944 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:49 crc kubenswrapper[4728]: I1205 11:08:49.578955 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:49Z","lastTransitionTime":"2025-12-05T11:08:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:49 crc kubenswrapper[4728]: I1205 11:08:49.682429 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:49 crc kubenswrapper[4728]: I1205 11:08:49.682490 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:49 crc kubenswrapper[4728]: I1205 11:08:49.682502 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:49 crc kubenswrapper[4728]: I1205 11:08:49.682522 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:49 crc kubenswrapper[4728]: I1205 11:08:49.682535 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:49Z","lastTransitionTime":"2025-12-05T11:08:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:49 crc kubenswrapper[4728]: I1205 11:08:49.784847 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:49 crc kubenswrapper[4728]: I1205 11:08:49.784895 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:49 crc kubenswrapper[4728]: I1205 11:08:49.784904 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:49 crc kubenswrapper[4728]: I1205 11:08:49.784923 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:49 crc kubenswrapper[4728]: I1205 11:08:49.784935 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:49Z","lastTransitionTime":"2025-12-05T11:08:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:49 crc kubenswrapper[4728]: I1205 11:08:49.887087 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:49 crc kubenswrapper[4728]: I1205 11:08:49.887167 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:49 crc kubenswrapper[4728]: I1205 11:08:49.887179 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:49 crc kubenswrapper[4728]: I1205 11:08:49.887199 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:49 crc kubenswrapper[4728]: I1205 11:08:49.887212 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:49Z","lastTransitionTime":"2025-12-05T11:08:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:49 crc kubenswrapper[4728]: I1205 11:08:49.990417 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:49 crc kubenswrapper[4728]: I1205 11:08:49.990489 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:49 crc kubenswrapper[4728]: I1205 11:08:49.990503 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:49 crc kubenswrapper[4728]: I1205 11:08:49.990525 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:49 crc kubenswrapper[4728]: I1205 11:08:49.990536 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:49Z","lastTransitionTime":"2025-12-05T11:08:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.094063 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.094112 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.094123 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.094140 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.094150 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:50Z","lastTransitionTime":"2025-12-05T11:08:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.194515 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.194560 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.194608 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.194624 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.194634 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:50Z","lastTransitionTime":"2025-12-05T11:08:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:50 crc kubenswrapper[4728]: E1205 11:08:50.209097 4728 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"feb38e4d-326c-4c7a-a272-95e0ac54f009\\\",\\\"systemUUID\\\":\\\"65b68dc7-92a1-4fa1-bbc7-423a936860c6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:50Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.213113 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.213144 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.213154 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.213184 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.213196 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:50Z","lastTransitionTime":"2025-12-05T11:08:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:50 crc kubenswrapper[4728]: E1205 11:08:50.226658 4728 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"feb38e4d-326c-4c7a-a272-95e0ac54f009\\\",\\\"systemUUID\\\":\\\"65b68dc7-92a1-4fa1-bbc7-423a936860c6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:50Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.230639 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.230703 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.230720 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.230760 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.230819 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:50Z","lastTransitionTime":"2025-12-05T11:08:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:50 crc kubenswrapper[4728]: E1205 11:08:50.245426 4728 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"feb38e4d-326c-4c7a-a272-95e0ac54f009\\\",\\\"systemUUID\\\":\\\"65b68dc7-92a1-4fa1-bbc7-423a936860c6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:50Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.248957 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.249003 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.249012 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.249027 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.249036 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:50Z","lastTransitionTime":"2025-12-05T11:08:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:50 crc kubenswrapper[4728]: E1205 11:08:50.262380 4728 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"feb38e4d-326c-4c7a-a272-95e0ac54f009\\\",\\\"systemUUID\\\":\\\"65b68dc7-92a1-4fa1-bbc7-423a936860c6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:50Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.266824 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.266865 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.266875 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.266892 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.266907 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:50Z","lastTransitionTime":"2025-12-05T11:08:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:50 crc kubenswrapper[4728]: E1205 11:08:50.285499 4728 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:08:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"feb38e4d-326c-4c7a-a272-95e0ac54f009\\\",\\\"systemUUID\\\":\\\"65b68dc7-92a1-4fa1-bbc7-423a936860c6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:50Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:50 crc kubenswrapper[4728]: E1205 11:08:50.285662 4728 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.287373 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.287395 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.287405 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.287422 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.287435 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:50Z","lastTransitionTime":"2025-12-05T11:08:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.351434 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.351479 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.351494 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2dq9w" Dec 05 11:08:50 crc kubenswrapper[4728]: E1205 11:08:50.351571 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 11:08:50 crc kubenswrapper[4728]: E1205 11:08:50.351715 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2dq9w" podUID="99a5c711-5c13-4615-93fc-9fbf02ce54ca" Dec 05 11:08:50 crc kubenswrapper[4728]: E1205 11:08:50.351762 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.389363 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.389418 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.389427 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.389440 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.389451 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:50Z","lastTransitionTime":"2025-12-05T11:08:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.491395 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.491448 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.491459 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.491476 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.491688 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:50Z","lastTransitionTime":"2025-12-05T11:08:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.593619 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.593666 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.593677 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.593691 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.593699 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:50Z","lastTransitionTime":"2025-12-05T11:08:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.695164 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.695195 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.695202 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.695216 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.695224 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:50Z","lastTransitionTime":"2025-12-05T11:08:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.801063 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.801116 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.801126 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.801147 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.801159 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:50Z","lastTransitionTime":"2025-12-05T11:08:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.904643 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.904689 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.904702 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.904721 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:50 crc kubenswrapper[4728]: I1205 11:08:50.904732 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:50Z","lastTransitionTime":"2025-12-05T11:08:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:51 crc kubenswrapper[4728]: I1205 11:08:51.006446 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:51 crc kubenswrapper[4728]: I1205 11:08:51.006492 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:51 crc kubenswrapper[4728]: I1205 11:08:51.006504 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:51 crc kubenswrapper[4728]: I1205 11:08:51.006523 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:51 crc kubenswrapper[4728]: I1205 11:08:51.006535 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:51Z","lastTransitionTime":"2025-12-05T11:08:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:51 crc kubenswrapper[4728]: I1205 11:08:51.108861 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:51 crc kubenswrapper[4728]: I1205 11:08:51.108886 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:51 crc kubenswrapper[4728]: I1205 11:08:51.108896 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:51 crc kubenswrapper[4728]: I1205 11:08:51.108910 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:51 crc kubenswrapper[4728]: I1205 11:08:51.108924 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:51Z","lastTransitionTime":"2025-12-05T11:08:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:51 crc kubenswrapper[4728]: I1205 11:08:51.211772 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:51 crc kubenswrapper[4728]: I1205 11:08:51.211864 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:51 crc kubenswrapper[4728]: I1205 11:08:51.211877 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:51 crc kubenswrapper[4728]: I1205 11:08:51.211892 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:51 crc kubenswrapper[4728]: I1205 11:08:51.211903 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:51Z","lastTransitionTime":"2025-12-05T11:08:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:51 crc kubenswrapper[4728]: I1205 11:08:51.314163 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:51 crc kubenswrapper[4728]: I1205 11:08:51.314201 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:51 crc kubenswrapper[4728]: I1205 11:08:51.314209 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:51 crc kubenswrapper[4728]: I1205 11:08:51.314223 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:51 crc kubenswrapper[4728]: I1205 11:08:51.314232 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:51Z","lastTransitionTime":"2025-12-05T11:08:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:51 crc kubenswrapper[4728]: I1205 11:08:51.351978 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:08:51 crc kubenswrapper[4728]: E1205 11:08:51.352137 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 11:08:51 crc kubenswrapper[4728]: I1205 11:08:51.417219 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:51 crc kubenswrapper[4728]: I1205 11:08:51.417273 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:51 crc kubenswrapper[4728]: I1205 11:08:51.417285 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:51 crc kubenswrapper[4728]: I1205 11:08:51.417306 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:51 crc kubenswrapper[4728]: I1205 11:08:51.417319 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:51Z","lastTransitionTime":"2025-12-05T11:08:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:51 crc kubenswrapper[4728]: I1205 11:08:51.520077 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:51 crc kubenswrapper[4728]: I1205 11:08:51.520137 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:51 crc kubenswrapper[4728]: I1205 11:08:51.520152 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:51 crc kubenswrapper[4728]: I1205 11:08:51.520172 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:51 crc kubenswrapper[4728]: I1205 11:08:51.520185 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:51Z","lastTransitionTime":"2025-12-05T11:08:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:51 crc kubenswrapper[4728]: I1205 11:08:51.623222 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:51 crc kubenswrapper[4728]: I1205 11:08:51.623284 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:51 crc kubenswrapper[4728]: I1205 11:08:51.623295 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:51 crc kubenswrapper[4728]: I1205 11:08:51.623332 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:51 crc kubenswrapper[4728]: I1205 11:08:51.623344 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:51Z","lastTransitionTime":"2025-12-05T11:08:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:51 crc kubenswrapper[4728]: I1205 11:08:51.726309 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:51 crc kubenswrapper[4728]: I1205 11:08:51.726391 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:51 crc kubenswrapper[4728]: I1205 11:08:51.726408 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:51 crc kubenswrapper[4728]: I1205 11:08:51.726427 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:51 crc kubenswrapper[4728]: I1205 11:08:51.726463 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:51Z","lastTransitionTime":"2025-12-05T11:08:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:51 crc kubenswrapper[4728]: I1205 11:08:51.828826 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:51 crc kubenswrapper[4728]: I1205 11:08:51.828866 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:51 crc kubenswrapper[4728]: I1205 11:08:51.828874 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:51 crc kubenswrapper[4728]: I1205 11:08:51.828894 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:51 crc kubenswrapper[4728]: I1205 11:08:51.828906 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:51Z","lastTransitionTime":"2025-12-05T11:08:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:51 crc kubenswrapper[4728]: I1205 11:08:51.931369 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:51 crc kubenswrapper[4728]: I1205 11:08:51.931410 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:51 crc kubenswrapper[4728]: I1205 11:08:51.931422 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:51 crc kubenswrapper[4728]: I1205 11:08:51.931439 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:51 crc kubenswrapper[4728]: I1205 11:08:51.931451 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:51Z","lastTransitionTime":"2025-12-05T11:08:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:51 crc kubenswrapper[4728]: I1205 11:08:51.986291 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/99a5c711-5c13-4615-93fc-9fbf02ce54ca-metrics-certs\") pod \"network-metrics-daemon-2dq9w\" (UID: \"99a5c711-5c13-4615-93fc-9fbf02ce54ca\") " pod="openshift-multus/network-metrics-daemon-2dq9w" Dec 05 11:08:51 crc kubenswrapper[4728]: E1205 11:08:51.986471 4728 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 11:08:51 crc kubenswrapper[4728]: E1205 11:08:51.986553 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/99a5c711-5c13-4615-93fc-9fbf02ce54ca-metrics-certs podName:99a5c711-5c13-4615-93fc-9fbf02ce54ca nodeName:}" failed. No retries permitted until 2025-12-05 11:09:23.986535658 +0000 UTC m=+98.128658351 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/99a5c711-5c13-4615-93fc-9fbf02ce54ca-metrics-certs") pod "network-metrics-daemon-2dq9w" (UID: "99a5c711-5c13-4615-93fc-9fbf02ce54ca") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 11:08:52 crc kubenswrapper[4728]: I1205 11:08:52.034573 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:52 crc kubenswrapper[4728]: I1205 11:08:52.034621 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:52 crc kubenswrapper[4728]: I1205 11:08:52.034631 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:52 crc kubenswrapper[4728]: I1205 11:08:52.034649 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:52 crc kubenswrapper[4728]: I1205 11:08:52.034661 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:52Z","lastTransitionTime":"2025-12-05T11:08:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:52 crc kubenswrapper[4728]: I1205 11:08:52.137373 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:52 crc kubenswrapper[4728]: I1205 11:08:52.137420 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:52 crc kubenswrapper[4728]: I1205 11:08:52.137438 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:52 crc kubenswrapper[4728]: I1205 11:08:52.137462 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:52 crc kubenswrapper[4728]: I1205 11:08:52.137475 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:52Z","lastTransitionTime":"2025-12-05T11:08:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:52 crc kubenswrapper[4728]: I1205 11:08:52.240919 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:52 crc kubenswrapper[4728]: I1205 11:08:52.241072 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:52 crc kubenswrapper[4728]: I1205 11:08:52.241086 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:52 crc kubenswrapper[4728]: I1205 11:08:52.241105 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:52 crc kubenswrapper[4728]: I1205 11:08:52.241115 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:52Z","lastTransitionTime":"2025-12-05T11:08:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:52 crc kubenswrapper[4728]: I1205 11:08:52.343425 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:52 crc kubenswrapper[4728]: I1205 11:08:52.343479 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:52 crc kubenswrapper[4728]: I1205 11:08:52.343491 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:52 crc kubenswrapper[4728]: I1205 11:08:52.343508 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:52 crc kubenswrapper[4728]: I1205 11:08:52.343521 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:52Z","lastTransitionTime":"2025-12-05T11:08:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:52 crc kubenswrapper[4728]: I1205 11:08:52.351756 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2dq9w" Dec 05 11:08:52 crc kubenswrapper[4728]: I1205 11:08:52.351862 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:08:52 crc kubenswrapper[4728]: I1205 11:08:52.351905 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:08:52 crc kubenswrapper[4728]: E1205 11:08:52.352042 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-2dq9w" podUID="99a5c711-5c13-4615-93fc-9fbf02ce54ca" Dec 05 11:08:52 crc kubenswrapper[4728]: E1205 11:08:52.352129 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 11:08:52 crc kubenswrapper[4728]: E1205 11:08:52.352256 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 11:08:52 crc kubenswrapper[4728]: I1205 11:08:52.446465 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:52 crc kubenswrapper[4728]: I1205 11:08:52.446497 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:52 crc kubenswrapper[4728]: I1205 11:08:52.446508 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:52 crc kubenswrapper[4728]: I1205 11:08:52.446522 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:52 crc kubenswrapper[4728]: I1205 11:08:52.446533 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:52Z","lastTransitionTime":"2025-12-05T11:08:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:52 crc kubenswrapper[4728]: I1205 11:08:52.549176 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:52 crc kubenswrapper[4728]: I1205 11:08:52.549221 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:52 crc kubenswrapper[4728]: I1205 11:08:52.549232 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:52 crc kubenswrapper[4728]: I1205 11:08:52.549255 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:52 crc kubenswrapper[4728]: I1205 11:08:52.549267 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:52Z","lastTransitionTime":"2025-12-05T11:08:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:52 crc kubenswrapper[4728]: I1205 11:08:52.651425 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:52 crc kubenswrapper[4728]: I1205 11:08:52.651500 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:52 crc kubenswrapper[4728]: I1205 11:08:52.651526 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:52 crc kubenswrapper[4728]: I1205 11:08:52.651555 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:52 crc kubenswrapper[4728]: I1205 11:08:52.651576 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:52Z","lastTransitionTime":"2025-12-05T11:08:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:52 crc kubenswrapper[4728]: I1205 11:08:52.755100 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:52 crc kubenswrapper[4728]: I1205 11:08:52.755138 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:52 crc kubenswrapper[4728]: I1205 11:08:52.755148 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:52 crc kubenswrapper[4728]: I1205 11:08:52.755167 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:52 crc kubenswrapper[4728]: I1205 11:08:52.755177 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:52Z","lastTransitionTime":"2025-12-05T11:08:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:52 crc kubenswrapper[4728]: I1205 11:08:52.857573 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:52 crc kubenswrapper[4728]: I1205 11:08:52.857608 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:52 crc kubenswrapper[4728]: I1205 11:08:52.857622 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:52 crc kubenswrapper[4728]: I1205 11:08:52.857635 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:52 crc kubenswrapper[4728]: I1205 11:08:52.857644 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:52Z","lastTransitionTime":"2025-12-05T11:08:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:52 crc kubenswrapper[4728]: I1205 11:08:52.959760 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:52 crc kubenswrapper[4728]: I1205 11:08:52.959834 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:52 crc kubenswrapper[4728]: I1205 11:08:52.959845 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:52 crc kubenswrapper[4728]: I1205 11:08:52.959864 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:52 crc kubenswrapper[4728]: I1205 11:08:52.959877 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:52Z","lastTransitionTime":"2025-12-05T11:08:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:52 crc kubenswrapper[4728]: I1205 11:08:52.974064 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gf8np_f292da29-a632-47aa-8bcc-2d999eaa6c11/kube-multus/0.log" Dec 05 11:08:52 crc kubenswrapper[4728]: I1205 11:08:52.974118 4728 generic.go:334] "Generic (PLEG): container finished" podID="f292da29-a632-47aa-8bcc-2d999eaa6c11" containerID="6d525f104a98170107ecc1ae96c4f5c5ff3dda2f976336c219e5a9498725380e" exitCode=1 Dec 05 11:08:52 crc kubenswrapper[4728]: I1205 11:08:52.974146 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gf8np" event={"ID":"f292da29-a632-47aa-8bcc-2d999eaa6c11","Type":"ContainerDied","Data":"6d525f104a98170107ecc1ae96c4f5c5ff3dda2f976336c219e5a9498725380e"} Dec 05 11:08:52 crc kubenswrapper[4728]: I1205 11:08:52.974521 4728 scope.go:117] "RemoveContainer" containerID="6d525f104a98170107ecc1ae96c4f5c5ff3dda2f976336c219e5a9498725380e" Dec 05 11:08:52 crc kubenswrapper[4728]: I1205 11:08:52.990882 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6b1dbb0-8a99-4b3b-870e-771cdaac1bac\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://adb81cae56872a55c5781abe457653330ce2284251d64366ece5f2f05055b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ab41915fb3de0296bc2ba49fe39835620c1ed216790add23ccc2c23ac718c2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://920da764fdc49312fdf906f31a1280028d5762b4b7af2b3806c9488e3dfa9d71\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd145f7ac78fc008f9b9b24a11f51d6afc11e3cfa047b8d178d11507ddde8e77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b05c55e24f53fc91a2fd2b2a34b000751799933c3cb56da1ea666a6bfb6ba13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T11:08:05Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 11:07:59.827917 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 11:07:59.829563 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2883091425/tls.crt::/tmp/serving-cert-2883091425/tls.key\\\\\\\"\\\\nI1205 11:08:05.319025 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 11:08:05.321746 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 11:08:05.321774 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 11:08:05.321847 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 11:08:05.321860 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 11:08:05.327281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 11:08:05.327310 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 11:08:05.327326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 11:08:05.327329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 11:08:05.327332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 11:08:05.327493 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 11:08:05.328771 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af8363ba45c19a07e7d11064c2e321ec28b939288d01eb82d8bd9e1ee6b93998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:52Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.015353 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://098ed82d50e963f31d1176e06249004a4cc94258aab5dabf6628dead209ae3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:53Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.032711 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gf8np" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f292da29-a632-47aa-8bcc-2d999eaa6c11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d525f104a98170107ecc1ae96c4f5c5ff3dda2f976336c219e5a9498725380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d525f104a98170107ecc1ae96c4f5c5ff3dda2f976336c219e5a9498725380e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T11:08:52Z\\\",\\\"message\\\":\\\"2025-12-05T11:08:07+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_f396c510-587b-403a-9a9d-1817df435c2b\\\\n2025-12-05T11:08:07+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_f396c510-587b-403a-9a9d-1817df435c2b to /host/opt/cni/bin/\\\\n2025-12-05T11:08:07Z [verbose] multus-daemon started\\\\n2025-12-05T11:08:07Z [verbose] Readiness Indicator file check\\\\n2025-12-05T11:08:52Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dxmch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-gf8np\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:53Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.043881 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bfa60b-fcb6-4519-abc5-c25fea50921d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d952894ef1ae2b5d6c23b0d1b85d9d2689e062674a2e4ba53719b6c7290bbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41cccfccd3b8763566f9a4453832461a74aedcf7bcc18e8c925bb20e1b620de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w8qlp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:53Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.058387 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8pwbb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e18c7d32-4ecb-4931-931e-56a7898cb233\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d53da0e296e545d13941882c9568c12f967a274b906dafb14c0905f558a155c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin
\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f79805a42a1054302447bc55dc07df80cf19e651cd1c3e783a4614ff49f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,
\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f79805a42a1054302447bc55dc07df80cf19e651cd1c3e783a4614ff49f9f624\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b953abf0e1905c143f473ea9d09341d7602168ff1350c317346bef8dda9402f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b953abf0e1905c143f473ea9d09341d7602168ff1350c317346bef8dda9402f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7627aff65d1bc4a39148f098ac2e1676d41c980ad3dd2d9494f4bee7b4c0872\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7627aff65d1bc4a39148f098ac2e1676d41c980ad3dd2d9494f4bee7b4c0872\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8pwbb\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:53Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.061821 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.061853 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.061864 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.061879 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.061890 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:53Z","lastTransitionTime":"2025-12-05T11:08:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.069996 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-n2qgk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f26e1600-9bcd-497a-b875-7eaed5b6fba8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b47033c6c217ce37f789d031e18e21eeb86ba85f574c6455e15c79a25bfeaecc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-ac
cess-fqbq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6b426a3d72e8498cd10b4085d89ffa51beaa52d2dbda7c70c96769e32a20d27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fqbq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-n2qgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:53Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.081772 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4abe5ae7-e1e1-4771-bb01-4e62b10e074b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe411cb87a46bdaf8208ebd4162ff18f2f9a2617ca43eb635793364573eb3ea4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ca6321022609b3cd60a9e1a534cfd3e69d3f520e5252ed4ffa8f76be4af91e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ccc55c28cdfadaa191738047a8d7223cf102d79a9af4d225562a87c42cf1c8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1665078c543b335af45e2ef37840dff719d513125a747f8c07dc74ef97e07354\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:53Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.092121 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-85f5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f536e7a4-ad53-442e-b7c3-8928fcd89f22\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://180e166e57042a6843516cf424020e8aab91131f7e0cc4c6f061de5e46bb0f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r72s7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-85f5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:53Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.104690 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:53Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.118282 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec1c7fc1e4ad2be5a7dfb1eeaa3def2a68d041e302643e875869f6c657c5f6d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:53Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.136571 4728 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62ccfb6861d12cec3082bfe61fc1976dc0889398539797aa62979b632e74c9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82bd221438311b8ba34cfc26e43e590743b257ad91407befb8f4d2e17f7ba55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aacb2f8e3ec748166a587a5c2c7c026382073d9e3b8e20caf4dc444e9cc70c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36c
dd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19041ddafe3e2fe2bb95577c9bc5e372ba443790469deb4f36634d3690a61b0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d488533307095d5c0efc56c8f32bac388226d2d5763c5c31db03c84585899f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02bc0b9bc6ed08d2162af49b31f5e66ef70f78723e4016f6be5905d1227201b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-con
troller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9275e2bb49afd9d4934b460729b9596662dc3b5b4efb5574cf88a845afd863f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9275e2bb49afd9d4934b460729b9596662dc3b5b4efb5574cf88a845afd863f7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T11:08:46Z\\\",\\\"message\\\":\\\"-manager/olm-operator-metrics]} name:Service_openshift-operator-lifecycle-manager/olm-operator-metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.168:8443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {63b1440a-0908-4cab-8799-012fa1cf0b07}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 11:08:46.188271 6432 ovnkube_controller.go:1292] Config duration recorder: kind/namespace/name service/openshift-cluster-machine-approver/machine-approver. OVN-Kubernetes controller took 0.13009727 seconds. No OVN measurement.\\\\nI1205 11:08:46.188282 6432 ovnkube_controller.go:1292] Config duration recorder: kind/namespace/name service/openshift-image-registry/image-registry-operator. OVN-Kubernetes controller took 0.129989518 seconds. 
No OVN measurement.\\\\nI1205 11:08:46.188337 6432 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1205 11:08:46.188423 6432 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1205 11:08:46.188488 6432 ovnkube.go:599] Stopped ovnkube\\\\nI1205 11:08:46.188516 6432 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1205 11:08:46.188575 6432 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:45Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-wchlf_openshift-ovn-kubernetes(1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9be8133cc8e8e518cd8a6a3ace93814e8d565d8bea2b81433ba07281ade2f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"
mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wchlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:53Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.148527 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zpkw4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ed53a3-7ee5-4d66-9e47-be49a9cd1b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71f9a74961573b2aafe3dce854836f51b027ee6abfb12bc2ff57be6524979165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqqjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zpkw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:53Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.159069 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2dq9w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"99a5c711-5c13-4615-93fc-9fbf02ce54ca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rq68g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rq68g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:20Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2dq9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:53Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.164246 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.164282 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.164292 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.164308 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.164319 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:53Z","lastTransitionTime":"2025-12-05T11:08:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.172340 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d14e1e8-ec42-4422-b988-d92b82edfcc6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3383601ba7eb5f9957d287898dee5f9fec9cfef59b609b1e525a0e0564d7e86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c4913667ba97a55961e836efe1c65c28881ae9f356084d3c712342aa85b9301\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f74f65892b7a9db35105f91763d8b150e62b86dae595e7d6208d47355d51bed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20c7713b547195d59fc70a00f6f6de511936ce6f38c6a4bcf89db4880ceebae6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20c7713b547195d59fc70a00f6f6de511936ce6f38c6a4bcf89db4880ceebae6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:53Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.188818 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90beec388d3137d02789bb6be800fbb0b1678f8799f133d1e48b1f636f70e8c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://780e71361f1592490b1bbc77b9a869204d3a20319d1bd6c8330d3b0fc81d197f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919
d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:53Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.202969 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:53Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.217145 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:53Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.238697 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"600a7698-d6af-4d3a-997c-2af7492b676c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4e303bada68d0a42afcecbe481dd6199dd2e545598d65614cee8ce1097fa3da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e0e4eacda19e748d0d3f8e1d76b38c186c2df0f81a171387de0d9016a76c719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"vol
umeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c6351e5a4a9441b43ba3145aa818599799ad7fcdd6bf62b5d0ec08eb3a9d5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c60e02416d658dfb8deecf915952febb04b650126edae08c8bf522c50524c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://489eff9110c6e48b0e3119edf09adf0a2a48fc1fdc1bb8e81f113ab882725eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"exitCode\\\":0,\\
\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:53Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.266349 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.266385 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.266392 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.266406 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.266416 4728 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:53Z","lastTransitionTime":"2025-12-05T11:08:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.351245 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:08:53 crc kubenswrapper[4728]: E1205 11:08:53.351370 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.368730 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.368769 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.368777 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.368803 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.368820 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:53Z","lastTransitionTime":"2025-12-05T11:08:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.471253 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.471289 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.471303 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.471323 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.471337 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:53Z","lastTransitionTime":"2025-12-05T11:08:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.573776 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.573834 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.573849 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.573866 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.573875 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:53Z","lastTransitionTime":"2025-12-05T11:08:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.676155 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.676199 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.676212 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.676228 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.676237 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:53Z","lastTransitionTime":"2025-12-05T11:08:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.778662 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.778711 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.778727 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.778749 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.778763 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:53Z","lastTransitionTime":"2025-12-05T11:08:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.880959 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.880996 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.881006 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.881024 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.881035 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:53Z","lastTransitionTime":"2025-12-05T11:08:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.978342 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gf8np_f292da29-a632-47aa-8bcc-2d999eaa6c11/kube-multus/0.log" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.978406 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gf8np" event={"ID":"f292da29-a632-47aa-8bcc-2d999eaa6c11","Type":"ContainerStarted","Data":"441e36947749174097676ff62fe93aff6b9689d29fd12af2c1604adcceffa5d4"} Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.983037 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.983079 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.983092 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.983107 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.983119 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:53Z","lastTransitionTime":"2025-12-05T11:08:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:53 crc kubenswrapper[4728]: I1205 11:08:53.992230 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4abe5ae7-e1e1-4771-bb01-4e62b10e074b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe411cb87a46bdaf8208ebd4162ff18f2f9a2617ca43eb635793364573eb3ea4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ca6321022609b3cd60a9e1a534cfd3e69d3f520e5252ed4ffa8f76be4af91e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ccc55c28cdfadaa191738047a8d7223cf102d79a9af4d225562a87c42cf1c8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1665078c543b335af45e2ef37840dff719d513125a747f8c07dc74ef97e07354\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:53Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.004307 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6b1dbb0-8a99-4b3b-870e-771cdaac1bac\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://adb81cae56872a55c5781abe457653330ce2284251d64366ece5f2f05055b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ab41915fb3de0296bc2ba49fe39835620c1ed216790add23ccc2c23ac718c2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://920da764fdc49312fdf906f31a1280028d5762b4b7af2b3806c9488e3dfa9d71\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd145f7ac78fc008f9b9b24a11f51d6afc11e3cfa047b8d178d11507ddde8e77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b05c55e24f53fc91a2fd2b2a34b000751799933c3cb56da1ea666a6bfb6ba13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T11:08:05Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 11:07:59.827917 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 11:07:59.829563 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2883091425/tls.crt::/tmp/serving-cert-2883091425/tls.key\\\\\\\"\\\\nI1205 11:08:05.319025 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 11:08:05.321746 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 11:08:05.321774 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 11:08:05.321847 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 11:08:05.321860 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 11:08:05.327281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 11:08:05.327310 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 11:08:05.327326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 11:08:05.327329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 11:08:05.327332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 11:08:05.327493 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 11:08:05.328771 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af8363ba45c19a07e7d11064c2e321ec28b939288d01eb82d8bd9e1ee6b93998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:54Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.017768 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://098ed82d50e963f31d1176e06249004a4cc94258aab5dabf6628dead209ae3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:54Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.030974 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gf8np" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f292da29-a632-47aa-8bcc-2d999eaa6c11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://441e36947749174097676ff62fe93aff6b9689d29fd12af2c1604adcceffa5d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d525f104a98170107ecc1ae96c4f5c5ff3dda2f976336c219e5a9498725380e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T11:08:52Z\\\",\\\"message\\\":\\\"2025-12-05T11:08:07+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_f396c510-587b-403a-9a9d-1817df435c2b\\\\n2025-12-05T11:08:07+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_f396c510-587b-403a-9a9d-1817df435c2b to /host/opt/cni/bin/\\\\n2025-12-05T11:08:07Z [verbose] multus-daemon started\\\\n2025-12-05T11:08:07Z [verbose] Readiness Indicator file check\\\\n2025-12-05T11:08:52Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dxmch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gf8np\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:54Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.044480 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bfa60b-fcb6-4519-abc5-c25fea50921d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d952894ef1ae2b5d6c23b0d1b85d9d2689e062674a2e4ba53719b6c7290bbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41cccfccd3b8763566f9a4453832461a74aedcf7bcc18e8c925bb20e1b620de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w8qlp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:54Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.060521 4728 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8pwbb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e18c7d32-4ecb-4931-931e-56a7898cb233\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d53da0e296e545d13941882c9568c12f967a274b906dafb14c0905f558a155c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f79805a42a1054302447bc55dc07df80cf19e651cd1c3e783a4614ff49f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f79805a42a1054302447bc55dc07df80cf19e651cd1c3e783a4614ff49f9f624\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b953abf0e1905c143f473ea9d09341d7602168ff1350c317346bef8dda9402f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b953abf0e1905c143f473ea9d09341d7602168ff1350c317346bef8dda9402f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7627aff65d1bc4a39148f098ac2e1676d41c980ad3dd2d9494f4bee7b4c0872\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7627aff65d1bc4a39148f098ac2e1676d41c980ad3dd2d9494f4bee7b4c0872\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8pwbb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:54Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.072752 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-n2qgk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f26e1600-9bcd-497a-b875-7eaed5b6fba8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b47033c6c217ce37f789d031e18e21eeb86ba85f574c6455e15c79a25bfeaecc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fqbq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6b426a3d72e8498cd10b4085d89ffa51beaa52d2dbda7c70c96769e32a20d27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fqbq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-n2qgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:54Z is after 2025-08-24T17:21:41Z" Dec 05 
11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.081975 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2dq9w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"99a5c711-5c13-4615-93fc-9fbf02ce54ca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rq68g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rq68g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:20Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2dq9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:54Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.084725 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.084754 4728 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.084764 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.084779 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.084805 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:54Z","lastTransitionTime":"2025-12-05T11:08:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.093761 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d14e1e8-ec42-4422-b988-d92b82edfcc6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3383601ba7eb5f9957d287898dee5f9fec9cfef59b609b1e525a0e0564d7e86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c4913667ba97a55961e836efe1c65c28881ae9f356084d3c712342aa85b9301\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-
pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f74f65892b7a9db35105f91763d8b150e62b86dae595e7d6208d47355d51bed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20c7713b547195d59fc70a00f6f6de511936ce6f38c6a4bcf89db4880ceebae6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20c7713b547195d59fc70a00f6f6de511936ce6f38c6a4bcf89db4880ceebae6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:54Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.103659 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-85f5z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f536e7a4-ad53-442e-b7c3-8928fcd89f22\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://180e166e57042a6843516cf424020e8aab91131f7e0cc4c6f061de5e46bb0f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r72s7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-85f5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:54Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.117316 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:54Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.129738 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec1c7fc1e4ad2be5a7dfb1eeaa3def2a68d041e302643e875869f6c657c5f6d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:54Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.146721 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62ccfb6861d12cec3082bfe61fc1976dc0889398539797aa62979b632e74c9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82bd221438311b8ba34cfc26e43e590743b257ad91407befb8f4d2e17f7ba55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\
\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aacb2f8e3ec748166a587a5c2c7c026382073d9e3b8e20caf4dc444e9cc70c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19041ddafe3e2fe2bb95577c9bc5e372ba443790469deb4f36634d3690a61b0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d488533307095d5c0efc56c8f32bac388226d2d5763c5c31db03c84585899f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-acces
s-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02bc0b9bc6ed08d2162af49b31f5e66ef70f78723e4016f6be5905d1227201b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9275e2bb49afd9d4934b460729b9596662dc3b5b4efb5574cf88a845afd863f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9275e2bb49afd9d4934b460729b9596662dc3b5b4efb5574cf88a845afd863f7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T11:08:46Z\\\",\\\"message\\\":\\\"-manager/olm-operator-metrics]} name:Service_openshift-operator-lifecycle-manager/olm-operator-metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.168:8443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {63b1440a-0908-4cab-8799-012fa1cf0b07}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 11:08:46.188271 6432 ovnkube_controller.go:1292] Config duration recorder: kind/namespace/name service/openshift-cluster-machine-approver/machine-approver. OVN-Kubernetes controller took 0.13009727 seconds. No OVN measurement.\\\\nI1205 11:08:46.188282 6432 ovnkube_controller.go:1292] Config duration recorder: kind/namespace/name service/openshift-image-registry/image-registry-operator. OVN-Kubernetes controller took 0.129989518 seconds. 
No OVN measurement.\\\\nI1205 11:08:46.188337 6432 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1205 11:08:46.188423 6432 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1205 11:08:46.188488 6432 ovnkube.go:599] Stopped ovnkube\\\\nI1205 11:08:46.188516 6432 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1205 11:08:46.188575 6432 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:45Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-wchlf_openshift-ovn-kubernetes(1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9be8133cc8e8e518cd8a6a3ace93814e8d565d8bea2b81433ba07281ade2f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"
mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wchlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:54Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.158379 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zpkw4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ed53a3-7ee5-4d66-9e47-be49a9cd1b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71f9a74961573b2aafe3dce854836f51b027ee6abfb12bc2ff57be6524979165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqqjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zpkw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:54Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.178146 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"600a7698-d6af-4d3a-997c-2af7492b676c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4e303bada68d0a42afcecbe481dd6199dd2e545598d65614cee8ce1097fa3da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e0e4eacda19e748d0d3f8e1d76b38c186c2df0f81a171387de0d9016a76c719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c6351e5a4a9441b43ba3145aa818599799ad7fcdd6bf62b5d0ec08eb3a9d5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c60e02416d658dfb8deecf915952febb04b650
126edae08c8bf522c50524c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://489eff9110c6e48b0e3119edf09adf0a2a48fc1fdc1bb8e81f113ab882725eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:54Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.187015 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.187061 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.187073 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.187089 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.187102 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:54Z","lastTransitionTime":"2025-12-05T11:08:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.193147 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90beec388d3137d02789bb6be800fbb0b1678f8799f133d1e48b1f636f70e8c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://780e71361f1592490b1bbc77b9a869204d3a20319d1bd6c8330d3b0fc81d197f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:54Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.205809 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:54Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.218610 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:54Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.289192 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.289244 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.289255 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.289271 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.289282 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:54Z","lastTransitionTime":"2025-12-05T11:08:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.351114 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.351157 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:08:54 crc kubenswrapper[4728]: E1205 11:08:54.351223 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.351343 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2dq9w" Dec 05 11:08:54 crc kubenswrapper[4728]: E1205 11:08:54.351400 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 11:08:54 crc kubenswrapper[4728]: E1205 11:08:54.351587 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2dq9w" podUID="99a5c711-5c13-4615-93fc-9fbf02ce54ca" Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.390622 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.390863 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.391017 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.391095 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.391159 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:54Z","lastTransitionTime":"2025-12-05T11:08:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.493803 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.493848 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.493860 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.493877 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.493888 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:54Z","lastTransitionTime":"2025-12-05T11:08:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.596442 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.596492 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.596507 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.596529 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.596544 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:54Z","lastTransitionTime":"2025-12-05T11:08:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.700286 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.700373 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.700390 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.700695 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.700719 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:54Z","lastTransitionTime":"2025-12-05T11:08:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.803252 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.803309 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.803321 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.803339 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.803354 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:54Z","lastTransitionTime":"2025-12-05T11:08:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.905733 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.905782 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.905811 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.905828 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:54 crc kubenswrapper[4728]: I1205 11:08:54.905839 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:54Z","lastTransitionTime":"2025-12-05T11:08:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:55 crc kubenswrapper[4728]: I1205 11:08:55.007906 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:55 crc kubenswrapper[4728]: I1205 11:08:55.007952 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:55 crc kubenswrapper[4728]: I1205 11:08:55.007966 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:55 crc kubenswrapper[4728]: I1205 11:08:55.007988 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:55 crc kubenswrapper[4728]: I1205 11:08:55.008002 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:55Z","lastTransitionTime":"2025-12-05T11:08:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:55 crc kubenswrapper[4728]: I1205 11:08:55.109871 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:55 crc kubenswrapper[4728]: I1205 11:08:55.109913 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:55 crc kubenswrapper[4728]: I1205 11:08:55.109942 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:55 crc kubenswrapper[4728]: I1205 11:08:55.109958 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:55 crc kubenswrapper[4728]: I1205 11:08:55.109969 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:55Z","lastTransitionTime":"2025-12-05T11:08:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:55 crc kubenswrapper[4728]: I1205 11:08:55.212285 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:55 crc kubenswrapper[4728]: I1205 11:08:55.212377 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:55 crc kubenswrapper[4728]: I1205 11:08:55.212387 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:55 crc kubenswrapper[4728]: I1205 11:08:55.212400 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:55 crc kubenswrapper[4728]: I1205 11:08:55.212408 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:55Z","lastTransitionTime":"2025-12-05T11:08:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:55 crc kubenswrapper[4728]: I1205 11:08:55.314306 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:55 crc kubenswrapper[4728]: I1205 11:08:55.314336 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:55 crc kubenswrapper[4728]: I1205 11:08:55.314344 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:55 crc kubenswrapper[4728]: I1205 11:08:55.314356 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:55 crc kubenswrapper[4728]: I1205 11:08:55.314366 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:55Z","lastTransitionTime":"2025-12-05T11:08:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:55 crc kubenswrapper[4728]: I1205 11:08:55.351707 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:08:55 crc kubenswrapper[4728]: E1205 11:08:55.351834 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 11:08:55 crc kubenswrapper[4728]: I1205 11:08:55.416313 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:55 crc kubenswrapper[4728]: I1205 11:08:55.416345 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:55 crc kubenswrapper[4728]: I1205 11:08:55.416353 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:55 crc kubenswrapper[4728]: I1205 11:08:55.416366 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:55 crc kubenswrapper[4728]: I1205 11:08:55.416375 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:55Z","lastTransitionTime":"2025-12-05T11:08:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:55 crc kubenswrapper[4728]: I1205 11:08:55.519021 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:55 crc kubenswrapper[4728]: I1205 11:08:55.519061 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:55 crc kubenswrapper[4728]: I1205 11:08:55.519072 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:55 crc kubenswrapper[4728]: I1205 11:08:55.519088 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:55 crc kubenswrapper[4728]: I1205 11:08:55.519100 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:55Z","lastTransitionTime":"2025-12-05T11:08:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:55 crc kubenswrapper[4728]: I1205 11:08:55.621320 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:55 crc kubenswrapper[4728]: I1205 11:08:55.621355 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:55 crc kubenswrapper[4728]: I1205 11:08:55.621366 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:55 crc kubenswrapper[4728]: I1205 11:08:55.621383 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:55 crc kubenswrapper[4728]: I1205 11:08:55.621392 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:55Z","lastTransitionTime":"2025-12-05T11:08:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:55 crc kubenswrapper[4728]: I1205 11:08:55.723266 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:55 crc kubenswrapper[4728]: I1205 11:08:55.723298 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:55 crc kubenswrapper[4728]: I1205 11:08:55.723309 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:55 crc kubenswrapper[4728]: I1205 11:08:55.723321 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:55 crc kubenswrapper[4728]: I1205 11:08:55.723331 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:55Z","lastTransitionTime":"2025-12-05T11:08:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:55 crc kubenswrapper[4728]: I1205 11:08:55.825912 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:55 crc kubenswrapper[4728]: I1205 11:08:55.825941 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:55 crc kubenswrapper[4728]: I1205 11:08:55.825951 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:55 crc kubenswrapper[4728]: I1205 11:08:55.825966 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:55 crc kubenswrapper[4728]: I1205 11:08:55.825978 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:55Z","lastTransitionTime":"2025-12-05T11:08:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:55 crc kubenswrapper[4728]: I1205 11:08:55.928468 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:55 crc kubenswrapper[4728]: I1205 11:08:55.928540 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:55 crc kubenswrapper[4728]: I1205 11:08:55.928550 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:55 crc kubenswrapper[4728]: I1205 11:08:55.928564 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:55 crc kubenswrapper[4728]: I1205 11:08:55.928574 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:55Z","lastTransitionTime":"2025-12-05T11:08:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.031428 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.031483 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.031493 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.031513 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.031525 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:56Z","lastTransitionTime":"2025-12-05T11:08:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.133898 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.133932 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.133940 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.133953 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.133981 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:56Z","lastTransitionTime":"2025-12-05T11:08:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.236211 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.236249 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.236260 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.236277 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.236288 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:56Z","lastTransitionTime":"2025-12-05T11:08:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.338944 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.339005 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.339017 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.339035 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.339054 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:56Z","lastTransitionTime":"2025-12-05T11:08:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.351063 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.351202 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2dq9w" Dec 05 11:08:56 crc kubenswrapper[4728]: E1205 11:08:56.351290 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.351384 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:08:56 crc kubenswrapper[4728]: E1205 11:08:56.352004 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2dq9w" podUID="99a5c711-5c13-4615-93fc-9fbf02ce54ca" Dec 05 11:08:56 crc kubenswrapper[4728]: E1205 11:08:56.352055 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.368063 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4abe5ae7-e1e1-4771-bb01-4e62b10e074b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe411cb87a46bdaf8208ebd4162ff18f2f9a2617ca43eb635793364573eb3ea4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ca6321022609b3cd60a9e1a534cfd3e69d3f520e5252ed4ffa8f76be4af91e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}}
,\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ccc55c28cdfadaa191738047a8d7223cf102d79a9af4d225562a87c42cf1c8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1665078c543b335af45e2ef37840dff719d513125a747f8c07dc74ef97e07354\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:56Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.384598 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6b1dbb0-8a99-4b3b-870e-771cdaac1bac\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://adb81cae56872a55c5781abe457653330ce2284251d64366ece5f2f05055b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ab41915fb3de0296bc2ba49fe39835620c1ed216790add23ccc2c23ac718c2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://920da764fdc49312fdf906f31a1280028d5762b4b7af2b3806c9488e3dfa9d71\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd145f7ac78fc008f9b9b24a11f51d6afc11e3cfa047b8d178d11507ddde8e77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b05c55e24f53fc91a2fd2b2a34b000751799933c3cb56da1ea666a6bfb6ba13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T11:08:05Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 11:07:59.827917 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 11:07:59.829563 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2883091425/tls.crt::/tmp/serving-cert-2883091425/tls.key\\\\\\\"\\\\nI1205 11:08:05.319025 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 11:08:05.321746 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 11:08:05.321774 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 11:08:05.321847 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 11:08:05.321860 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 11:08:05.327281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 11:08:05.327310 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 11:08:05.327326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 11:08:05.327329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 11:08:05.327332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 11:08:05.327493 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 11:08:05.328771 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af8363ba45c19a07e7d11064c2e321ec28b939288d01eb82d8bd9e1ee6b93998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:56Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.396603 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://098ed82d50e963f31d1176e06249004a4cc94258aab5dabf6628dead209ae3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:56Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.409379 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gf8np" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f292da29-a632-47aa-8bcc-2d999eaa6c11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://441e36947749174097676ff62fe93aff6b9689d29fd12af2c1604adcceffa5d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d525f104a98170107ecc1ae96c4f5c5ff3dda2f976336c219e5a9498725380e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T11:08:52Z\\\",\\\"message\\\":\\\"2025-12-05T11:08:07+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_f396c510-587b-403a-9a9d-1817df435c2b\\\\n2025-12-05T11:08:07+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_f396c510-587b-403a-9a9d-1817df435c2b to /host/opt/cni/bin/\\\\n2025-12-05T11:08:07Z [verbose] multus-daemon started\\\\n2025-12-05T11:08:07Z [verbose] Readiness Indicator file check\\\\n2025-12-05T11:08:52Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dxmch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gf8np\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:56Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.421875 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bfa60b-fcb6-4519-abc5-c25fea50921d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d952894ef1ae2b5d6c23b0d1b85d9d2689e062674a2e4ba53719b6c7290bbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41cccfccd3b8763566f9a4453832461a74aedcf7bcc18e8c925bb20e1b620de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w8qlp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:56Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.436994 4728 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8pwbb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e18c7d32-4ecb-4931-931e-56a7898cb233\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d53da0e296e545d13941882c9568c12f967a274b906dafb14c0905f558a155c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f79805a42a1054302447bc55dc07df80cf19e651cd1c3e783a4614ff49f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f79805a42a1054302447bc55dc07df80cf19e651cd1c3e783a4614ff49f9f624\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b953abf0e1905c143f473ea9d09341d7602168ff1350c317346bef8dda9402f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b953abf0e1905c143f473ea9d09341d7602168ff1350c317346bef8dda9402f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7627aff65d1bc4a39148f098ac2e1676d41c980ad3dd2d9494f4bee7b4c0872\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7627aff65d1bc4a39148f098ac2e1676d41c980ad3dd2d9494f4bee7b4c0872\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8pwbb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:56Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.440721 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.440755 4728 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.440765 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.440781 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.440807 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:56Z","lastTransitionTime":"2025-12-05T11:08:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.448525 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-n2qgk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f26e1600-9bcd-497a-b875-7eaed5b6fba8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b47033c6c217ce37f789d031e18e21eeb86ba85f574c6455e15c79a25bfeaecc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fqbq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6b426a3d72e8498cd10b4085d89ffa51beaa52d2dbda7c70c96769e32a20d27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\
\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fqbq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-n2qgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:56Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.459321 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2dq9w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"99a5c711-5c13-4615-93fc-9fbf02ce54ca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rq68g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rq68g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:20Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2dq9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:56Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.470367 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d14e1e8-ec42-4422-b988-d92b82edfcc6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3383601ba7eb5f9957d287898dee5f9fec9cfef59b609b1e525a0e0564d7e86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c4913667ba97a55961e836efe1c65c28881ae9f356084d3c712342aa85b9301\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f74f65892b7a9db35105f91763d8b150e62b86dae595e7d6208d47355d51bed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20c7713b547195d59fc70a00f6f6de511936ce6f38c6a4bcf89db4880ceebae6\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20c7713b547195d59fc70a00f6f6de511936ce6f38c6a4bcf89db4880ceebae6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:56Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.479979 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-85f5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f536e7a4-ad53-442e-b7c3-8928fcd89f22\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://180e166e57042a6843516cf424020e8aab91131f7e0cc4c6f061de5e46bb0f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r72s7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod 
\"openshift-dns\"/\"node-resolver-85f5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:56Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.499209 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:56Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.510190 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec1c7fc1e4ad2be5a7dfb1eeaa3def2a68d041e302643e875869f6c657c5f6d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:56Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.525358 4728 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62ccfb6861d12cec3082bfe61fc1976dc0889398539797aa62979b632e74c9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82bd221438311b8ba34cfc26e43e590743b257ad91407befb8f4d2e17f7ba55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aacb2f8e3ec748166a587a5c2c7c026382073d9e3b8e20caf4dc444e9cc70c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36c
dd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19041ddafe3e2fe2bb95577c9bc5e372ba443790469deb4f36634d3690a61b0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d488533307095d5c0efc56c8f32bac388226d2d5763c5c31db03c84585899f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02bc0b9bc6ed08d2162af49b31f5e66ef70f78723e4016f6be5905d1227201b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-con
troller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9275e2bb49afd9d4934b460729b9596662dc3b5b4efb5574cf88a845afd863f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9275e2bb49afd9d4934b460729b9596662dc3b5b4efb5574cf88a845afd863f7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T11:08:46Z\\\",\\\"message\\\":\\\"-manager/olm-operator-metrics]} name:Service_openshift-operator-lifecycle-manager/olm-operator-metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.168:8443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {63b1440a-0908-4cab-8799-012fa1cf0b07}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 11:08:46.188271 6432 ovnkube_controller.go:1292] Config duration recorder: kind/namespace/name service/openshift-cluster-machine-approver/machine-approver. OVN-Kubernetes controller took 0.13009727 seconds. No OVN measurement.\\\\nI1205 11:08:46.188282 6432 ovnkube_controller.go:1292] Config duration recorder: kind/namespace/name service/openshift-image-registry/image-registry-operator. OVN-Kubernetes controller took 0.129989518 seconds. 
No OVN measurement.\\\\nI1205 11:08:46.188337 6432 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1205 11:08:46.188423 6432 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1205 11:08:46.188488 6432 ovnkube.go:599] Stopped ovnkube\\\\nI1205 11:08:46.188516 6432 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1205 11:08:46.188575 6432 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:45Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-wchlf_openshift-ovn-kubernetes(1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9be8133cc8e8e518cd8a6a3ace93814e8d565d8bea2b81433ba07281ade2f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"
mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wchlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:56Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.535147 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zpkw4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ed53a3-7ee5-4d66-9e47-be49a9cd1b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71f9a74961573b2aafe3dce854836f51b027ee6abfb12bc2ff57be6524979165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqqjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zpkw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:56Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.543685 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.543730 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.543742 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.543759 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.543769 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:56Z","lastTransitionTime":"2025-12-05T11:08:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.555737 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"600a7698-d6af-4d3a-997c-2af7492b676c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4e303bada68d0a42afcecbe481dd6199dd2e545598d65614cee8ce1097fa3da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e0e4eacda19e748d0d3f8e1d76b38c186c2df0f81a171387de0d9016a76c719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c6351e5a4a9441b43ba3145aa818599799ad7fcdd6bf62b5d0ec08eb3a9d5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":
0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c60e02416d658dfb8deecf915952febb04b650126edae08c8bf522c50524c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://489eff9110c6e48b0e3119edf09adf0a2a48fc1fdc1bb8e81f113ab882725eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termi
nated\\\":{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:56Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.570352 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90beec388d3137d02789bb6be800fbb0b1678f8799f133d1e48b1f636f70e8c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://780e71361f1592490b1bbc77b9a869204d3a20319d1bd6c8330d3b0fc81d197f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:56Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.581559 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:56Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.592640 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:08:56Z is after 2025-08-24T17:21:41Z" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.646048 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.646087 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.646101 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.646118 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.646130 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:56Z","lastTransitionTime":"2025-12-05T11:08:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.748366 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.748416 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.748429 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.748447 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.748459 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:56Z","lastTransitionTime":"2025-12-05T11:08:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.850697 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.850739 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.850749 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.850766 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.850774 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:56Z","lastTransitionTime":"2025-12-05T11:08:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.953933 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.953980 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.953997 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.954016 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:56 crc kubenswrapper[4728]: I1205 11:08:56.954028 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:56Z","lastTransitionTime":"2025-12-05T11:08:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:57 crc kubenswrapper[4728]: I1205 11:08:57.056230 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:57 crc kubenswrapper[4728]: I1205 11:08:57.056283 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:57 crc kubenswrapper[4728]: I1205 11:08:57.056298 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:57 crc kubenswrapper[4728]: I1205 11:08:57.056315 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:57 crc kubenswrapper[4728]: I1205 11:08:57.056328 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:57Z","lastTransitionTime":"2025-12-05T11:08:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:57 crc kubenswrapper[4728]: I1205 11:08:57.158109 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:57 crc kubenswrapper[4728]: I1205 11:08:57.158174 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:57 crc kubenswrapper[4728]: I1205 11:08:57.158185 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:57 crc kubenswrapper[4728]: I1205 11:08:57.158202 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:57 crc kubenswrapper[4728]: I1205 11:08:57.158215 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:57Z","lastTransitionTime":"2025-12-05T11:08:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:57 crc kubenswrapper[4728]: I1205 11:08:57.260389 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:57 crc kubenswrapper[4728]: I1205 11:08:57.260426 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:57 crc kubenswrapper[4728]: I1205 11:08:57.260436 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:57 crc kubenswrapper[4728]: I1205 11:08:57.260450 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:57 crc kubenswrapper[4728]: I1205 11:08:57.260462 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:57Z","lastTransitionTime":"2025-12-05T11:08:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:57 crc kubenswrapper[4728]: I1205 11:08:57.350930 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:08:57 crc kubenswrapper[4728]: E1205 11:08:57.351049 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 11:08:57 crc kubenswrapper[4728]: I1205 11:08:57.365966 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:57 crc kubenswrapper[4728]: I1205 11:08:57.366022 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:57 crc kubenswrapper[4728]: I1205 11:08:57.366035 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:57 crc kubenswrapper[4728]: I1205 11:08:57.366049 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:57 crc kubenswrapper[4728]: I1205 11:08:57.366062 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:57Z","lastTransitionTime":"2025-12-05T11:08:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:57 crc kubenswrapper[4728]: I1205 11:08:57.468826 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:57 crc kubenswrapper[4728]: I1205 11:08:57.468871 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:57 crc kubenswrapper[4728]: I1205 11:08:57.468882 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:57 crc kubenswrapper[4728]: I1205 11:08:57.468897 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:57 crc kubenswrapper[4728]: I1205 11:08:57.468908 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:57Z","lastTransitionTime":"2025-12-05T11:08:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:57 crc kubenswrapper[4728]: I1205 11:08:57.571275 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:57 crc kubenswrapper[4728]: I1205 11:08:57.571312 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:57 crc kubenswrapper[4728]: I1205 11:08:57.571322 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:57 crc kubenswrapper[4728]: I1205 11:08:57.571339 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:57 crc kubenswrapper[4728]: I1205 11:08:57.571351 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:57Z","lastTransitionTime":"2025-12-05T11:08:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:57 crc kubenswrapper[4728]: I1205 11:08:57.673884 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:57 crc kubenswrapper[4728]: I1205 11:08:57.673919 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:57 crc kubenswrapper[4728]: I1205 11:08:57.673929 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:57 crc kubenswrapper[4728]: I1205 11:08:57.673946 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:57 crc kubenswrapper[4728]: I1205 11:08:57.673957 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:57Z","lastTransitionTime":"2025-12-05T11:08:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:57 crc kubenswrapper[4728]: I1205 11:08:57.776274 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:57 crc kubenswrapper[4728]: I1205 11:08:57.776325 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:57 crc kubenswrapper[4728]: I1205 11:08:57.776337 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:57 crc kubenswrapper[4728]: I1205 11:08:57.776354 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:57 crc kubenswrapper[4728]: I1205 11:08:57.776367 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:57Z","lastTransitionTime":"2025-12-05T11:08:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:57 crc kubenswrapper[4728]: I1205 11:08:57.878986 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:57 crc kubenswrapper[4728]: I1205 11:08:57.879031 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:57 crc kubenswrapper[4728]: I1205 11:08:57.879045 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:57 crc kubenswrapper[4728]: I1205 11:08:57.879065 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:57 crc kubenswrapper[4728]: I1205 11:08:57.879082 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:57Z","lastTransitionTime":"2025-12-05T11:08:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:57 crc kubenswrapper[4728]: I1205 11:08:57.981713 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:57 crc kubenswrapper[4728]: I1205 11:08:57.981760 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:57 crc kubenswrapper[4728]: I1205 11:08:57.981777 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:57 crc kubenswrapper[4728]: I1205 11:08:57.981822 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:57 crc kubenswrapper[4728]: I1205 11:08:57.981840 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:57Z","lastTransitionTime":"2025-12-05T11:08:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:58 crc kubenswrapper[4728]: I1205 11:08:58.084593 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:58 crc kubenswrapper[4728]: I1205 11:08:58.084672 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:58 crc kubenswrapper[4728]: I1205 11:08:58.084683 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:58 crc kubenswrapper[4728]: I1205 11:08:58.084699 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:58 crc kubenswrapper[4728]: I1205 11:08:58.084709 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:58Z","lastTransitionTime":"2025-12-05T11:08:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:58 crc kubenswrapper[4728]: I1205 11:08:58.187562 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:58 crc kubenswrapper[4728]: I1205 11:08:58.187599 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:58 crc kubenswrapper[4728]: I1205 11:08:58.187608 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:58 crc kubenswrapper[4728]: I1205 11:08:58.187623 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:58 crc kubenswrapper[4728]: I1205 11:08:58.187634 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:58Z","lastTransitionTime":"2025-12-05T11:08:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:58 crc kubenswrapper[4728]: I1205 11:08:58.289838 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:58 crc kubenswrapper[4728]: I1205 11:08:58.289902 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:58 crc kubenswrapper[4728]: I1205 11:08:58.289915 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:58 crc kubenswrapper[4728]: I1205 11:08:58.289933 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:58 crc kubenswrapper[4728]: I1205 11:08:58.289944 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:58Z","lastTransitionTime":"2025-12-05T11:08:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:58 crc kubenswrapper[4728]: I1205 11:08:58.351492 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2dq9w" Dec 05 11:08:58 crc kubenswrapper[4728]: I1205 11:08:58.351540 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:08:58 crc kubenswrapper[4728]: I1205 11:08:58.351554 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:08:58 crc kubenswrapper[4728]: E1205 11:08:58.351642 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-2dq9w" podUID="99a5c711-5c13-4615-93fc-9fbf02ce54ca" Dec 05 11:08:58 crc kubenswrapper[4728]: E1205 11:08:58.351727 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 11:08:58 crc kubenswrapper[4728]: E1205 11:08:58.351845 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 11:08:58 crc kubenswrapper[4728]: I1205 11:08:58.392714 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:58 crc kubenswrapper[4728]: I1205 11:08:58.392738 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:58 crc kubenswrapper[4728]: I1205 11:08:58.392747 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:58 crc kubenswrapper[4728]: I1205 11:08:58.392764 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:58 crc kubenswrapper[4728]: I1205 11:08:58.392773 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:58Z","lastTransitionTime":"2025-12-05T11:08:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:58 crc kubenswrapper[4728]: I1205 11:08:58.494854 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:58 crc kubenswrapper[4728]: I1205 11:08:58.494918 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:58 crc kubenswrapper[4728]: I1205 11:08:58.494928 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:58 crc kubenswrapper[4728]: I1205 11:08:58.494964 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:58 crc kubenswrapper[4728]: I1205 11:08:58.494978 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:58Z","lastTransitionTime":"2025-12-05T11:08:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:58 crc kubenswrapper[4728]: I1205 11:08:58.597322 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:58 crc kubenswrapper[4728]: I1205 11:08:58.597352 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:58 crc kubenswrapper[4728]: I1205 11:08:58.597363 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:58 crc kubenswrapper[4728]: I1205 11:08:58.597379 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:58 crc kubenswrapper[4728]: I1205 11:08:58.597389 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:58Z","lastTransitionTime":"2025-12-05T11:08:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:58 crc kubenswrapper[4728]: I1205 11:08:58.699637 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:58 crc kubenswrapper[4728]: I1205 11:08:58.699666 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:58 crc kubenswrapper[4728]: I1205 11:08:58.699679 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:58 crc kubenswrapper[4728]: I1205 11:08:58.699696 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:58 crc kubenswrapper[4728]: I1205 11:08:58.699708 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:58Z","lastTransitionTime":"2025-12-05T11:08:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:58 crc kubenswrapper[4728]: I1205 11:08:58.801657 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:58 crc kubenswrapper[4728]: I1205 11:08:58.801706 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:58 crc kubenswrapper[4728]: I1205 11:08:58.801719 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:58 crc kubenswrapper[4728]: I1205 11:08:58.801739 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:58 crc kubenswrapper[4728]: I1205 11:08:58.801754 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:58Z","lastTransitionTime":"2025-12-05T11:08:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:58 crc kubenswrapper[4728]: I1205 11:08:58.904266 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:58 crc kubenswrapper[4728]: I1205 11:08:58.904526 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:58 crc kubenswrapper[4728]: I1205 11:08:58.904592 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:58 crc kubenswrapper[4728]: I1205 11:08:58.904656 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:58 crc kubenswrapper[4728]: I1205 11:08:58.904717 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:58Z","lastTransitionTime":"2025-12-05T11:08:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:59 crc kubenswrapper[4728]: I1205 11:08:59.007046 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:59 crc kubenswrapper[4728]: I1205 11:08:59.007087 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:59 crc kubenswrapper[4728]: I1205 11:08:59.007099 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:59 crc kubenswrapper[4728]: I1205 11:08:59.007114 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:59 crc kubenswrapper[4728]: I1205 11:08:59.007126 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:59Z","lastTransitionTime":"2025-12-05T11:08:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:59 crc kubenswrapper[4728]: I1205 11:08:59.109451 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:59 crc kubenswrapper[4728]: I1205 11:08:59.109502 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:59 crc kubenswrapper[4728]: I1205 11:08:59.109521 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:59 crc kubenswrapper[4728]: I1205 11:08:59.109544 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:59 crc kubenswrapper[4728]: I1205 11:08:59.109560 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:59Z","lastTransitionTime":"2025-12-05T11:08:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:59 crc kubenswrapper[4728]: I1205 11:08:59.212535 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:59 crc kubenswrapper[4728]: I1205 11:08:59.212580 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:59 crc kubenswrapper[4728]: I1205 11:08:59.212592 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:59 crc kubenswrapper[4728]: I1205 11:08:59.212610 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:59 crc kubenswrapper[4728]: I1205 11:08:59.212622 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:59Z","lastTransitionTime":"2025-12-05T11:08:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:59 crc kubenswrapper[4728]: I1205 11:08:59.315400 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:59 crc kubenswrapper[4728]: I1205 11:08:59.315473 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:59 crc kubenswrapper[4728]: I1205 11:08:59.315495 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:59 crc kubenswrapper[4728]: I1205 11:08:59.315519 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:59 crc kubenswrapper[4728]: I1205 11:08:59.315534 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:59Z","lastTransitionTime":"2025-12-05T11:08:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:59 crc kubenswrapper[4728]: I1205 11:08:59.350962 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:08:59 crc kubenswrapper[4728]: E1205 11:08:59.351092 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 11:08:59 crc kubenswrapper[4728]: I1205 11:08:59.418437 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:59 crc kubenswrapper[4728]: I1205 11:08:59.418480 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:59 crc kubenswrapper[4728]: I1205 11:08:59.418488 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:59 crc kubenswrapper[4728]: I1205 11:08:59.418502 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:59 crc kubenswrapper[4728]: I1205 11:08:59.418512 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:59Z","lastTransitionTime":"2025-12-05T11:08:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:59 crc kubenswrapper[4728]: I1205 11:08:59.520920 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:59 crc kubenswrapper[4728]: I1205 11:08:59.520961 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:59 crc kubenswrapper[4728]: I1205 11:08:59.520972 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:59 crc kubenswrapper[4728]: I1205 11:08:59.520987 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:59 crc kubenswrapper[4728]: I1205 11:08:59.520999 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:59Z","lastTransitionTime":"2025-12-05T11:08:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:59 crc kubenswrapper[4728]: I1205 11:08:59.623253 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:59 crc kubenswrapper[4728]: I1205 11:08:59.623296 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:59 crc kubenswrapper[4728]: I1205 11:08:59.623306 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:59 crc kubenswrapper[4728]: I1205 11:08:59.623322 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:59 crc kubenswrapper[4728]: I1205 11:08:59.623338 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:59Z","lastTransitionTime":"2025-12-05T11:08:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:59 crc kubenswrapper[4728]: I1205 11:08:59.725840 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:59 crc kubenswrapper[4728]: I1205 11:08:59.725884 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:59 crc kubenswrapper[4728]: I1205 11:08:59.725893 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:59 crc kubenswrapper[4728]: I1205 11:08:59.725908 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:59 crc kubenswrapper[4728]: I1205 11:08:59.725920 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:59Z","lastTransitionTime":"2025-12-05T11:08:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:08:59 crc kubenswrapper[4728]: I1205 11:08:59.829093 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:59 crc kubenswrapper[4728]: I1205 11:08:59.829137 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:59 crc kubenswrapper[4728]: I1205 11:08:59.829147 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:59 crc kubenswrapper[4728]: I1205 11:08:59.829162 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:59 crc kubenswrapper[4728]: I1205 11:08:59.829176 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:59Z","lastTransitionTime":"2025-12-05T11:08:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:08:59 crc kubenswrapper[4728]: I1205 11:08:59.932130 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:08:59 crc kubenswrapper[4728]: I1205 11:08:59.932187 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:08:59 crc kubenswrapper[4728]: I1205 11:08:59.932203 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:08:59 crc kubenswrapper[4728]: I1205 11:08:59.932226 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:08:59 crc kubenswrapper[4728]: I1205 11:08:59.932244 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:08:59Z","lastTransitionTime":"2025-12-05T11:08:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.034583 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.034626 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.034637 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.034653 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.034664 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:00Z","lastTransitionTime":"2025-12-05T11:09:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.136969 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.137006 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.137016 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.137103 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.137118 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:00Z","lastTransitionTime":"2025-12-05T11:09:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.245588 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.245647 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.245821 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.245853 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.245869 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:00Z","lastTransitionTime":"2025-12-05T11:09:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.327214 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.327288 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.327309 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.327334 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.327351 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:00Z","lastTransitionTime":"2025-12-05T11:09:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:09:00 crc kubenswrapper[4728]: E1205 11:09:00.348956 4728 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:09:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:09:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:09:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:09:00Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:09:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:09:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:09:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:09:00Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"feb38e4d-326c-4c7a-a272-95e0ac54f009\\\",\\\"systemUUID\\\":\\\"65b68dc7-92a1-4fa1-bbc7-423a936860c6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:00Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.351623 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.351623 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.352328 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2dq9w" Dec 05 11:09:00 crc kubenswrapper[4728]: E1205 11:09:00.352503 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 11:09:00 crc kubenswrapper[4728]: E1205 11:09:00.352669 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2dq9w" podUID="99a5c711-5c13-4615-93fc-9fbf02ce54ca" Dec 05 11:09:00 crc kubenswrapper[4728]: E1205 11:09:00.352675 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.352946 4728 scope.go:117] "RemoveContainer" containerID="9275e2bb49afd9d4934b460729b9596662dc3b5b4efb5574cf88a845afd863f7" Dec 05 11:09:00 crc kubenswrapper[4728]: E1205 11:09:00.353232 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-wchlf_openshift-ovn-kubernetes(1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5)\"" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" podUID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.354184 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.354297 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.354385 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.354472 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.354553 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:00Z","lastTransitionTime":"2025-12-05T11:09:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:09:00 crc kubenswrapper[4728]: E1205 11:09:00.371434 4728 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:09:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:09:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:09:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:09:00Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:09:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:09:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:09:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:09:00Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"feb38e4d-326c-4c7a-a272-95e0ac54f009\\\",\\\"systemUUID\\\":\\\"65b68dc7-92a1-4fa1-bbc7-423a936860c6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:00Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.375784 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.376238 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.376466 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.376709 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.376921 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:00Z","lastTransitionTime":"2025-12-05T11:09:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:00 crc kubenswrapper[4728]: E1205 11:09:00.397705 4728 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:09:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:09:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:09:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:09:00Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:09:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:09:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:09:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:09:00Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"feb38e4d-326c-4c7a-a272-95e0ac54f009\\\",\\\"systemUUID\\\":\\\"65b68dc7-92a1-4fa1-bbc7-423a936860c6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:00Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.402471 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.402556 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.402580 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.402611 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.402635 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:00Z","lastTransitionTime":"2025-12-05T11:09:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:00 crc kubenswrapper[4728]: E1205 11:09:00.419482 4728 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:09:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:09:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:09:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:09:00Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:09:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:09:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:09:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:09:00Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"feb38e4d-326c-4c7a-a272-95e0ac54f009\\\",\\\"systemUUID\\\":\\\"65b68dc7-92a1-4fa1-bbc7-423a936860c6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:00Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.423688 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.423728 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.423736 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.423751 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.423760 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:00Z","lastTransitionTime":"2025-12-05T11:09:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:00 crc kubenswrapper[4728]: E1205 11:09:00.441563 4728 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:09:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:09:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:09:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:09:00Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:09:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:09:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:09:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:09:00Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"feb38e4d-326c-4c7a-a272-95e0ac54f009\\\",\\\"systemUUID\\\":\\\"65b68dc7-92a1-4fa1-bbc7-423a936860c6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:00Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:00 crc kubenswrapper[4728]: E1205 11:09:00.441736 4728 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.443569 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.443609 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.443617 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.443633 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.443642 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:00Z","lastTransitionTime":"2025-12-05T11:09:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.546500 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.546550 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.546562 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.546582 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.546596 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:00Z","lastTransitionTime":"2025-12-05T11:09:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.649094 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.649142 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.649152 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.649171 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.649186 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:00Z","lastTransitionTime":"2025-12-05T11:09:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.751313 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.751358 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.751372 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.751392 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.751406 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:00Z","lastTransitionTime":"2025-12-05T11:09:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.853164 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.853222 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.853237 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.853257 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.853275 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:00Z","lastTransitionTime":"2025-12-05T11:09:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.956286 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.956349 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.956360 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.956378 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:00 crc kubenswrapper[4728]: I1205 11:09:00.956391 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:00Z","lastTransitionTime":"2025-12-05T11:09:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:09:01 crc kubenswrapper[4728]: I1205 11:09:01.059306 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:01 crc kubenswrapper[4728]: I1205 11:09:01.059349 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:01 crc kubenswrapper[4728]: I1205 11:09:01.059359 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:01 crc kubenswrapper[4728]: I1205 11:09:01.059376 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:01 crc kubenswrapper[4728]: I1205 11:09:01.059388 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:01Z","lastTransitionTime":"2025-12-05T11:09:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:01 crc kubenswrapper[4728]: I1205 11:09:01.162548 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:01 crc kubenswrapper[4728]: I1205 11:09:01.162610 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:01 crc kubenswrapper[4728]: I1205 11:09:01.162635 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:01 crc kubenswrapper[4728]: I1205 11:09:01.162667 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:01 crc kubenswrapper[4728]: I1205 11:09:01.162687 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:01Z","lastTransitionTime":"2025-12-05T11:09:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:01 crc kubenswrapper[4728]: I1205 11:09:01.265454 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:01 crc kubenswrapper[4728]: I1205 11:09:01.265498 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:01 crc kubenswrapper[4728]: I1205 11:09:01.265507 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:01 crc kubenswrapper[4728]: I1205 11:09:01.265524 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:01 crc kubenswrapper[4728]: I1205 11:09:01.265537 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:01Z","lastTransitionTime":"2025-12-05T11:09:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:09:01 crc kubenswrapper[4728]: I1205 11:09:01.351418 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:09:01 crc kubenswrapper[4728]: E1205 11:09:01.351533 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 11:09:01 crc kubenswrapper[4728]: I1205 11:09:01.367514 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:01 crc kubenswrapper[4728]: I1205 11:09:01.367579 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:01 crc kubenswrapper[4728]: I1205 11:09:01.367593 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:01 crc kubenswrapper[4728]: I1205 11:09:01.367614 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:01 crc kubenswrapper[4728]: I1205 11:09:01.367629 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:01Z","lastTransitionTime":"2025-12-05T11:09:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:01 crc kubenswrapper[4728]: I1205 11:09:01.470202 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:01 crc kubenswrapper[4728]: I1205 11:09:01.470251 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:01 crc kubenswrapper[4728]: I1205 11:09:01.470263 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:01 crc kubenswrapper[4728]: I1205 11:09:01.470280 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:01 crc kubenswrapper[4728]: I1205 11:09:01.470294 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:01Z","lastTransitionTime":"2025-12-05T11:09:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:09:01 crc kubenswrapper[4728]: I1205 11:09:01.573183 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:01 crc kubenswrapper[4728]: I1205 11:09:01.573240 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:01 crc kubenswrapper[4728]: I1205 11:09:01.573257 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:01 crc kubenswrapper[4728]: I1205 11:09:01.573280 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:01 crc kubenswrapper[4728]: I1205 11:09:01.573300 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:01Z","lastTransitionTime":"2025-12-05T11:09:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:01 crc kubenswrapper[4728]: I1205 11:09:01.675536 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:01 crc kubenswrapper[4728]: I1205 11:09:01.675591 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:01 crc kubenswrapper[4728]: I1205 11:09:01.675600 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:01 crc kubenswrapper[4728]: I1205 11:09:01.675620 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:01 crc kubenswrapper[4728]: I1205 11:09:01.675630 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:01Z","lastTransitionTime":"2025-12-05T11:09:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:01 crc kubenswrapper[4728]: I1205 11:09:01.778593 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:01 crc kubenswrapper[4728]: I1205 11:09:01.778629 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:01 crc kubenswrapper[4728]: I1205 11:09:01.778640 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:01 crc kubenswrapper[4728]: I1205 11:09:01.778657 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:01 crc kubenswrapper[4728]: I1205 11:09:01.778668 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:01Z","lastTransitionTime":"2025-12-05T11:09:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:09:01 crc kubenswrapper[4728]: I1205 11:09:01.881724 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:01 crc kubenswrapper[4728]: I1205 11:09:01.881827 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:01 crc kubenswrapper[4728]: I1205 11:09:01.881853 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:01 crc kubenswrapper[4728]: I1205 11:09:01.881880 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:01 crc kubenswrapper[4728]: I1205 11:09:01.881904 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:01Z","lastTransitionTime":"2025-12-05T11:09:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:01 crc kubenswrapper[4728]: I1205 11:09:01.985488 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:01 crc kubenswrapper[4728]: I1205 11:09:01.985547 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:01 crc kubenswrapper[4728]: I1205 11:09:01.985568 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:01 crc kubenswrapper[4728]: I1205 11:09:01.985595 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:01 crc kubenswrapper[4728]: I1205 11:09:01.985616 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:01Z","lastTransitionTime":"2025-12-05T11:09:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:02 crc kubenswrapper[4728]: I1205 11:09:02.089048 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:02 crc kubenswrapper[4728]: I1205 11:09:02.089109 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:02 crc kubenswrapper[4728]: I1205 11:09:02.089126 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:02 crc kubenswrapper[4728]: I1205 11:09:02.089150 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:02 crc kubenswrapper[4728]: I1205 11:09:02.089168 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:02Z","lastTransitionTime":"2025-12-05T11:09:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:09:02 crc kubenswrapper[4728]: I1205 11:09:02.192294 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:02 crc kubenswrapper[4728]: I1205 11:09:02.192352 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:02 crc kubenswrapper[4728]: I1205 11:09:02.192375 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:02 crc kubenswrapper[4728]: I1205 11:09:02.192404 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:02 crc kubenswrapper[4728]: I1205 11:09:02.192426 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:02Z","lastTransitionTime":"2025-12-05T11:09:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:02 crc kubenswrapper[4728]: I1205 11:09:02.295278 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:02 crc kubenswrapper[4728]: I1205 11:09:02.295390 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:02 crc kubenswrapper[4728]: I1205 11:09:02.295408 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:02 crc kubenswrapper[4728]: I1205 11:09:02.295431 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:02 crc kubenswrapper[4728]: I1205 11:09:02.295447 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:02Z","lastTransitionTime":"2025-12-05T11:09:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:02 crc kubenswrapper[4728]: I1205 11:09:02.351864 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2dq9w" Dec 05 11:09:02 crc kubenswrapper[4728]: I1205 11:09:02.351880 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:09:02 crc kubenswrapper[4728]: E1205 11:09:02.352013 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2dq9w" podUID="99a5c711-5c13-4615-93fc-9fbf02ce54ca" Dec 05 11:09:02 crc kubenswrapper[4728]: I1205 11:09:02.352050 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:09:02 crc kubenswrapper[4728]: E1205 11:09:02.352147 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 11:09:02 crc kubenswrapper[4728]: E1205 11:09:02.352235 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 11:09:02 crc kubenswrapper[4728]: I1205 11:09:02.398632 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:02 crc kubenswrapper[4728]: I1205 11:09:02.398774 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:02 crc kubenswrapper[4728]: I1205 11:09:02.398841 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:02 crc kubenswrapper[4728]: I1205 11:09:02.398873 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:02 crc kubenswrapper[4728]: I1205 11:09:02.398896 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:02Z","lastTransitionTime":"2025-12-05T11:09:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:02 crc kubenswrapper[4728]: I1205 11:09:02.501268 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:02 crc kubenswrapper[4728]: I1205 11:09:02.501343 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:02 crc kubenswrapper[4728]: I1205 11:09:02.501363 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:02 crc kubenswrapper[4728]: I1205 11:09:02.501392 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:02 crc kubenswrapper[4728]: I1205 11:09:02.501415 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:02Z","lastTransitionTime":"2025-12-05T11:09:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:09:02 crc kubenswrapper[4728]: I1205 11:09:02.604954 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:02 crc kubenswrapper[4728]: I1205 11:09:02.605009 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:02 crc kubenswrapper[4728]: I1205 11:09:02.605046 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:02 crc kubenswrapper[4728]: I1205 11:09:02.605075 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:02 crc kubenswrapper[4728]: I1205 11:09:02.605098 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:02Z","lastTransitionTime":"2025-12-05T11:09:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:02 crc kubenswrapper[4728]: I1205 11:09:02.707560 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:02 crc kubenswrapper[4728]: I1205 11:09:02.707622 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:02 crc kubenswrapper[4728]: I1205 11:09:02.707646 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:02 crc kubenswrapper[4728]: I1205 11:09:02.707675 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:02 crc kubenswrapper[4728]: I1205 11:09:02.707696 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:02Z","lastTransitionTime":"2025-12-05T11:09:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:02 crc kubenswrapper[4728]: I1205 11:09:02.811076 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:02 crc kubenswrapper[4728]: I1205 11:09:02.811138 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:02 crc kubenswrapper[4728]: I1205 11:09:02.811149 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:02 crc kubenswrapper[4728]: I1205 11:09:02.811173 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:02 crc kubenswrapper[4728]: I1205 11:09:02.811187 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:02Z","lastTransitionTime":"2025-12-05T11:09:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:09:02 crc kubenswrapper[4728]: I1205 11:09:02.915173 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:02 crc kubenswrapper[4728]: I1205 11:09:02.915232 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:02 crc kubenswrapper[4728]: I1205 11:09:02.915245 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:02 crc kubenswrapper[4728]: I1205 11:09:02.915271 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:02 crc kubenswrapper[4728]: I1205 11:09:02.915287 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:02Z","lastTransitionTime":"2025-12-05T11:09:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:03 crc kubenswrapper[4728]: I1205 11:09:03.018344 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:03 crc kubenswrapper[4728]: I1205 11:09:03.018418 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:03 crc kubenswrapper[4728]: I1205 11:09:03.018442 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:03 crc kubenswrapper[4728]: I1205 11:09:03.018471 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:03 crc kubenswrapper[4728]: I1205 11:09:03.018516 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:03Z","lastTransitionTime":"2025-12-05T11:09:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:03 crc kubenswrapper[4728]: I1205 11:09:03.121476 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:03 crc kubenswrapper[4728]: I1205 11:09:03.121522 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:03 crc kubenswrapper[4728]: I1205 11:09:03.121533 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:03 crc kubenswrapper[4728]: I1205 11:09:03.121549 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:03 crc kubenswrapper[4728]: I1205 11:09:03.121560 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:03Z","lastTransitionTime":"2025-12-05T11:09:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:09:03 crc kubenswrapper[4728]: I1205 11:09:03.225177 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:03 crc kubenswrapper[4728]: I1205 11:09:03.225233 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:03 crc kubenswrapper[4728]: I1205 11:09:03.225254 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:03 crc kubenswrapper[4728]: I1205 11:09:03.225284 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:03 crc kubenswrapper[4728]: I1205 11:09:03.225305 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:03Z","lastTransitionTime":"2025-12-05T11:09:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:03 crc kubenswrapper[4728]: I1205 11:09:03.328556 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:03 crc kubenswrapper[4728]: I1205 11:09:03.328599 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:03 crc kubenswrapper[4728]: I1205 11:09:03.328607 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:03 crc kubenswrapper[4728]: I1205 11:09:03.328621 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:03 crc kubenswrapper[4728]: I1205 11:09:03.328632 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:03Z","lastTransitionTime":"2025-12-05T11:09:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:03 crc kubenswrapper[4728]: I1205 11:09:03.350985 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:09:03 crc kubenswrapper[4728]: E1205 11:09:03.351106 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 11:09:03 crc kubenswrapper[4728]: I1205 11:09:03.432004 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:03 crc kubenswrapper[4728]: I1205 11:09:03.432042 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:03 crc kubenswrapper[4728]: I1205 11:09:03.432053 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:03 crc kubenswrapper[4728]: I1205 11:09:03.432067 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:03 crc kubenswrapper[4728]: I1205 11:09:03.432076 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:03Z","lastTransitionTime":"2025-12-05T11:09:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:03 crc kubenswrapper[4728]: I1205 11:09:03.534534 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:03 crc kubenswrapper[4728]: I1205 11:09:03.534584 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:03 crc kubenswrapper[4728]: I1205 11:09:03.534613 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:03 crc kubenswrapper[4728]: I1205 11:09:03.534633 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:03 crc kubenswrapper[4728]: I1205 11:09:03.534645 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:03Z","lastTransitionTime":"2025-12-05T11:09:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:09:03 crc kubenswrapper[4728]: I1205 11:09:03.637486 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:03 crc kubenswrapper[4728]: I1205 11:09:03.637545 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:03 crc kubenswrapper[4728]: I1205 11:09:03.637560 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:03 crc kubenswrapper[4728]: I1205 11:09:03.637583 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:03 crc kubenswrapper[4728]: I1205 11:09:03.637600 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:03Z","lastTransitionTime":"2025-12-05T11:09:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:03 crc kubenswrapper[4728]: I1205 11:09:03.739923 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:03 crc kubenswrapper[4728]: I1205 11:09:03.740144 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:03 crc kubenswrapper[4728]: I1205 11:09:03.740152 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:03 crc kubenswrapper[4728]: I1205 11:09:03.740164 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:03 crc kubenswrapper[4728]: I1205 11:09:03.740173 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:03Z","lastTransitionTime":"2025-12-05T11:09:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:03 crc kubenswrapper[4728]: I1205 11:09:03.843282 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:03 crc kubenswrapper[4728]: I1205 11:09:03.843354 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:03 crc kubenswrapper[4728]: I1205 11:09:03.843367 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:03 crc kubenswrapper[4728]: I1205 11:09:03.843383 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:03 crc kubenswrapper[4728]: I1205 11:09:03.843394 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:03Z","lastTransitionTime":"2025-12-05T11:09:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:09:03 crc kubenswrapper[4728]: I1205 11:09:03.946110 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:03 crc kubenswrapper[4728]: I1205 11:09:03.946166 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:03 crc kubenswrapper[4728]: I1205 11:09:03.946183 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:03 crc kubenswrapper[4728]: I1205 11:09:03.946207 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:03 crc kubenswrapper[4728]: I1205 11:09:03.946227 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:03Z","lastTransitionTime":"2025-12-05T11:09:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:04 crc kubenswrapper[4728]: I1205 11:09:04.049288 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:04 crc kubenswrapper[4728]: I1205 11:09:04.049358 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:04 crc kubenswrapper[4728]: I1205 11:09:04.049368 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:04 crc kubenswrapper[4728]: I1205 11:09:04.049383 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:04 crc kubenswrapper[4728]: I1205 11:09:04.049395 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:04Z","lastTransitionTime":"2025-12-05T11:09:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:04 crc kubenswrapper[4728]: I1205 11:09:04.151628 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:04 crc kubenswrapper[4728]: I1205 11:09:04.151692 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:04 crc kubenswrapper[4728]: I1205 11:09:04.151707 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:04 crc kubenswrapper[4728]: I1205 11:09:04.151729 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:04 crc kubenswrapper[4728]: I1205 11:09:04.151745 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:04Z","lastTransitionTime":"2025-12-05T11:09:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:09:04 crc kubenswrapper[4728]: I1205 11:09:04.254392 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:04 crc kubenswrapper[4728]: I1205 11:09:04.254433 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:04 crc kubenswrapper[4728]: I1205 11:09:04.254441 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:04 crc kubenswrapper[4728]: I1205 11:09:04.254454 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:04 crc kubenswrapper[4728]: I1205 11:09:04.254461 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:04Z","lastTransitionTime":"2025-12-05T11:09:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:04 crc kubenswrapper[4728]: I1205 11:09:04.352088 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:09:04 crc kubenswrapper[4728]: I1205 11:09:04.352159 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2dq9w" Dec 05 11:09:04 crc kubenswrapper[4728]: I1205 11:09:04.352102 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:09:04 crc kubenswrapper[4728]: E1205 11:09:04.352307 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 11:09:04 crc kubenswrapper[4728]: E1205 11:09:04.352410 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 11:09:04 crc kubenswrapper[4728]: E1205 11:09:04.352562 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-2dq9w" podUID="99a5c711-5c13-4615-93fc-9fbf02ce54ca" Dec 05 11:09:04 crc kubenswrapper[4728]: I1205 11:09:04.355891 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:04 crc kubenswrapper[4728]: I1205 11:09:04.355926 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:04 crc kubenswrapper[4728]: I1205 11:09:04.355936 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:04 crc kubenswrapper[4728]: I1205 11:09:04.355954 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:04 crc kubenswrapper[4728]: I1205 11:09:04.356133 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:04Z","lastTransitionTime":"2025-12-05T11:09:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:04 crc kubenswrapper[4728]: I1205 11:09:04.363613 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Dec 05 11:09:04 crc kubenswrapper[4728]: I1205 11:09:04.461964 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:04 crc kubenswrapper[4728]: I1205 11:09:04.462034 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:04 crc kubenswrapper[4728]: I1205 11:09:04.462046 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:04 crc kubenswrapper[4728]: I1205 11:09:04.462065 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:04 crc kubenswrapper[4728]: I1205 11:09:04.462077 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:04Z","lastTransitionTime":"2025-12-05T11:09:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:09:04 crc kubenswrapper[4728]: I1205 11:09:04.565007 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:04 crc kubenswrapper[4728]: I1205 11:09:04.565047 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:04 crc kubenswrapper[4728]: I1205 11:09:04.565055 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:04 crc kubenswrapper[4728]: I1205 11:09:04.565069 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:04 crc kubenswrapper[4728]: I1205 11:09:04.565078 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:04Z","lastTransitionTime":"2025-12-05T11:09:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:04 crc kubenswrapper[4728]: I1205 11:09:04.667244 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:04 crc kubenswrapper[4728]: I1205 11:09:04.667283 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:04 crc kubenswrapper[4728]: I1205 11:09:04.667298 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:04 crc kubenswrapper[4728]: I1205 11:09:04.667314 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:04 crc kubenswrapper[4728]: I1205 11:09:04.667324 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:04Z","lastTransitionTime":"2025-12-05T11:09:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:04 crc kubenswrapper[4728]: I1205 11:09:04.770180 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:04 crc kubenswrapper[4728]: I1205 11:09:04.770214 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:04 crc kubenswrapper[4728]: I1205 11:09:04.770221 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:04 crc kubenswrapper[4728]: I1205 11:09:04.770233 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:04 crc kubenswrapper[4728]: I1205 11:09:04.770242 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:04Z","lastTransitionTime":"2025-12-05T11:09:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:09:04 crc kubenswrapper[4728]: I1205 11:09:04.872737 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:04 crc kubenswrapper[4728]: I1205 11:09:04.872784 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:04 crc kubenswrapper[4728]: I1205 11:09:04.872823 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:04 crc kubenswrapper[4728]: I1205 11:09:04.872841 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:04 crc kubenswrapper[4728]: I1205 11:09:04.872852 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:04Z","lastTransitionTime":"2025-12-05T11:09:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:04 crc kubenswrapper[4728]: I1205 11:09:04.976205 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:04 crc kubenswrapper[4728]: I1205 11:09:04.976262 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:04 crc kubenswrapper[4728]: I1205 11:09:04.976278 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:04 crc kubenswrapper[4728]: I1205 11:09:04.976303 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:04 crc kubenswrapper[4728]: I1205 11:09:04.976321 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:04Z","lastTransitionTime":"2025-12-05T11:09:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:05 crc kubenswrapper[4728]: I1205 11:09:05.079314 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:05 crc kubenswrapper[4728]: I1205 11:09:05.079359 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:05 crc kubenswrapper[4728]: I1205 11:09:05.079372 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:05 crc kubenswrapper[4728]: I1205 11:09:05.079394 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:05 crc kubenswrapper[4728]: I1205 11:09:05.079409 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:05Z","lastTransitionTime":"2025-12-05T11:09:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:09:05 crc kubenswrapper[4728]: I1205 11:09:05.182274 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:05 crc kubenswrapper[4728]: I1205 11:09:05.182319 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:05 crc kubenswrapper[4728]: I1205 11:09:05.182330 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:05 crc kubenswrapper[4728]: I1205 11:09:05.182347 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:05 crc kubenswrapper[4728]: I1205 11:09:05.182359 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:05Z","lastTransitionTime":"2025-12-05T11:09:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:05 crc kubenswrapper[4728]: I1205 11:09:05.285220 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:05 crc kubenswrapper[4728]: I1205 11:09:05.285279 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:05 crc kubenswrapper[4728]: I1205 11:09:05.285296 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:05 crc kubenswrapper[4728]: I1205 11:09:05.285319 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:05 crc kubenswrapper[4728]: I1205 11:09:05.285335 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:05Z","lastTransitionTime":"2025-12-05T11:09:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:05 crc kubenswrapper[4728]: I1205 11:09:05.350959 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:09:05 crc kubenswrapper[4728]: E1205 11:09:05.351104 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 11:09:05 crc kubenswrapper[4728]: I1205 11:09:05.388640 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:05 crc kubenswrapper[4728]: I1205 11:09:05.388692 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:05 crc kubenswrapper[4728]: I1205 11:09:05.388709 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:05 crc kubenswrapper[4728]: I1205 11:09:05.388735 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:05 crc kubenswrapper[4728]: I1205 11:09:05.388753 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:05Z","lastTransitionTime":"2025-12-05T11:09:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:05 crc kubenswrapper[4728]: I1205 11:09:05.491569 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:05 crc kubenswrapper[4728]: I1205 11:09:05.491651 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:05 crc kubenswrapper[4728]: I1205 11:09:05.491673 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:05 crc kubenswrapper[4728]: I1205 11:09:05.491692 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:05 crc kubenswrapper[4728]: I1205 11:09:05.491705 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:05Z","lastTransitionTime":"2025-12-05T11:09:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:09:05 crc kubenswrapper[4728]: I1205 11:09:05.595029 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:05 crc kubenswrapper[4728]: I1205 11:09:05.595105 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:05 crc kubenswrapper[4728]: I1205 11:09:05.595129 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:05 crc kubenswrapper[4728]: I1205 11:09:05.595163 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:05 crc kubenswrapper[4728]: I1205 11:09:05.595186 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:05Z","lastTransitionTime":"2025-12-05T11:09:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:05 crc kubenswrapper[4728]: I1205 11:09:05.697707 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:05 crc kubenswrapper[4728]: I1205 11:09:05.697825 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:05 crc kubenswrapper[4728]: I1205 11:09:05.697844 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:05 crc kubenswrapper[4728]: I1205 11:09:05.697868 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:05 crc kubenswrapper[4728]: I1205 11:09:05.697886 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:05Z","lastTransitionTime":"2025-12-05T11:09:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:05 crc kubenswrapper[4728]: I1205 11:09:05.800840 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:05 crc kubenswrapper[4728]: I1205 11:09:05.800896 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:05 crc kubenswrapper[4728]: I1205 11:09:05.800910 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:05 crc kubenswrapper[4728]: I1205 11:09:05.800930 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:05 crc kubenswrapper[4728]: I1205 11:09:05.800943 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:05Z","lastTransitionTime":"2025-12-05T11:09:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:09:05 crc kubenswrapper[4728]: I1205 11:09:05.902841 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:05 crc kubenswrapper[4728]: I1205 11:09:05.902915 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:05 crc kubenswrapper[4728]: I1205 11:09:05.902928 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:05 crc kubenswrapper[4728]: I1205 11:09:05.902943 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:05 crc kubenswrapper[4728]: I1205 11:09:05.902955 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:05Z","lastTransitionTime":"2025-12-05T11:09:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.005109 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.005166 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.005185 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.005208 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.005225 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:06Z","lastTransitionTime":"2025-12-05T11:09:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.108726 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.108852 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.108880 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.108911 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.108931 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:06Z","lastTransitionTime":"2025-12-05T11:09:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.211267 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.211315 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.211326 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.211345 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.211357 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:06Z","lastTransitionTime":"2025-12-05T11:09:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.313701 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.313783 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.313890 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.313952 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.313992 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:06Z","lastTransitionTime":"2025-12-05T11:09:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.351903 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.351950 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2dq9w" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.351939 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:09:06 crc kubenswrapper[4728]: E1205 11:09:06.352153 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 11:09:06 crc kubenswrapper[4728]: E1205 11:09:06.352282 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2dq9w" podUID="99a5c711-5c13-4615-93fc-9fbf02ce54ca" Dec 05 11:09:06 crc kubenswrapper[4728]: E1205 11:09:06.352363 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.373062 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"600a7698-d6af-4d3a-997c-2af7492b676c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4e303bada68d0a42afcecbe481dd6199dd2e545598d65614cee8ce1097fa3da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e0e4eacda19e748d0d3f8e1d76b38c186c2df0f81a171387de0d9016a76c719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":
0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c6351e5a4a9441b43ba3145aa818599799ad7fcdd6bf62b5d0ec08eb3a9d5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c60e02416d658dfb8deecf915952febb04b650126edae08c8bf522c50524c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://489eff9110c6e48b0e3119edf09adf0a2a48fc1fdc1bb8e81f113ab882725eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"contain
erID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:06Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.388636 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90beec388d3137d02789bb6be800fbb0b1678f8799f133d1e48b1f636f70e8c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://780e71361f1592490b1bbc77b9a869204d3a20319d1bd6c8330d3b0fc81d197f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:06Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.405085 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:06Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.416683 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.416744 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.416751 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.416765 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.416774 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:06Z","lastTransitionTime":"2025-12-05T11:09:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.421490 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:06Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.439395 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4abe5ae7-e1e1-4771-bb01-4e62b10e074b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe411cb87a46bdaf8208ebd4162ff18f2f9a2617ca43eb635793364573eb3ea4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ca6321022609b3cd60a9e1a534cfd3e69d3f520e5252ed4ffa8f76be4af91e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ccc55c28cdfadaa191738047a8d7223cf102d79a9af4d225562a87c42cf1c8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1665078c543b335af45e2ef37840dff719d513125a747f8c07dc74ef97e07354\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:06Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.454622 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6b1dbb0-8a99-4b3b-870e-771cdaac1bac\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://adb81cae56872a55c5781abe457653330ce2284251d64366ece5f2f05055b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ab41915fb3de0296bc2ba49fe39835620c1ed216790add23ccc2c23ac718c2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c
987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://920da764fdc49312fdf906f31a1280028d5762b4b7af2b3806c9488e3dfa9d71\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd145f7ac78fc008f9b9b24a11f51d6afc11e3cfa047b8d178d11507ddde8e77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b05c55e24f53fc91a2fd2b2a34b000751799933c3cb56da1ea666a6bfb6ba13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T11:08:05Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 11:07:59.827917 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 11:07:59.829563 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2883091425/tls.crt::/tmp/serving-cert-2883091425/tls.key\\\\\\\"\\\\nI1205 11:08:05.319025 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 11:08:05.321746 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 11:08:05.321774 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 11:08:05.321847 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 11:08:05.321860 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 11:08:05.327281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 11:08:05.327310 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327322 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 11:08:05.327326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 11:08:05.327329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 11:08:05.327332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 11:08:05.327493 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 11:08:05.328771 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af8363ba45c19a07e7d11064c2e321ec28b939288d01eb82d8bd9e1ee6b93998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:06Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.467674 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://098ed82d50e963f31d1176e06249004a4cc94258aab5dabf6628dead209ae3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:06Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.480694 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gf8np" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f292da29-a632-47aa-8bcc-2d999eaa6c11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://441e36947749174097676ff62fe93aff6b9689d29fd12af2c1604adcceffa5d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d525f104a98170107ecc1ae96c4f5c5ff3dda2f976336c219e5a9498725380e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T11:08:52Z\\\",\\\"message\\\":\\\"2025-12-05T11:08:07+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_f396c510-587b-403a-9a9d-1817df435c2b\\\\n2025-12-05T11:08:07+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_f396c510-587b-403a-9a9d-1817df435c2b to /host/opt/cni/bin/\\\\n2025-12-05T11:08:07Z [verbose] multus-daemon started\\\\n2025-12-05T11:08:07Z [verbose] Readiness Indicator file check\\\\n2025-12-05T11:08:52Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dxmch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gf8np\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:06Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.504279 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bfa60b-fcb6-4519-abc5-c25fea50921d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d952894ef1ae2b5d6c23b0d1b85d9d2689e062674a2e4ba53719b6c7290bbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41cccfccd3b8763566f9a4453832461a74aedcf7bcc18e8c925bb20e1b620de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w8qlp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:06Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.518565 4728 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.518599 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.518607 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.518621 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.518631 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:06Z","lastTransitionTime":"2025-12-05T11:09:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.521019 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8pwbb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e18c7d32-4ecb-4931-931e-56a7898cb233\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d53da0e296e545d13941882c9568c12f967a274b906dafb14c0905f558a155c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2
c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/
secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f79805a42a1054302447bc55dc07df80cf19e651cd1c3e783a4614ff49f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f79805a42a1054302447bc55dc07df80cf19e651cd1c3e783a4614ff49f9f624\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b953abf0e1905c143f473ea9d09341d7602168ff1350c317346bef8dda9402f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b953abf0e1905c143f473ea9d09341d7602168ff1350c317346bef8dda9402f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7627aff65d1bc4a39148f098ac2e1676d41c980ad3dd2d9494f4bee7b4c0872\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7627aff65d1bc4a39148f098ac2e1676d41c980ad3dd2d9494f4bee7b4c0872\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:15Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8pwbb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:06Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.532615 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-n2qgk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f26e1600-9bcd-497a-b875-7eaed5b6fba8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b47033c6c217ce37f789d031e18e21eeb86ba85f574c6455e15c79a25bfeaecc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fqbq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6b426a3d72e8498cd10b4085d89ffa51beaa52d2dbda7c70c96769e32a20d27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\
\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fqbq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-n2qgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:06Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.545537 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d14e1e8-ec42-4422-b988-d92b82edfcc6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3383601ba7eb5f9957d287898dee5f9fec9cfef59b609b1e525a0e0564d7e86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c4913667ba97a55961e836efe1c65c28881ae9f356084d3c712342aa85b9301\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state
\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f74f65892b7a9db35105f91763d8b150e62b86dae595e7d6208d47355d51bed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20c7713b547195d59fc70a00f6f6de511936ce6f38c6a4bcf89db4880ceebae6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20c7713b547195d59fc70a00f6f6de511936ce6f38c6a4bcf89db4880ceebae6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:06Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.555723 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e7e94e2-79a4-4d0f-b8cb-b31827c3dc44\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f927842bae27f6ef715fcbfdbbb0dbb79e59e4706e28bffd6331140f8a66d7f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5d44790495a71b1a699a16a635f5722e5a9ab589f6ad8fd46f6ffc536b56815\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5d44790495a71b1a699a16a635f5722e5a9ab589f6ad8fd46f6ffc536b56815\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:06Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.564993 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-85f5z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f536e7a4-ad53-442e-b7c3-8928fcd89f22\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://180e166e57042a6843516cf424020e8aab91131f7e0cc4c6f061de5e46bb0f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r72s7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-85f5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:06Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.576391 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:06Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.591433 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec1c7fc1e4ad2be5a7dfb1eeaa3def2a68d041e302643e875869f6c657c5f6d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:06Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.612810 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62ccfb6861d12cec3082bfe61fc1976dc0889398539797aa62979b632e74c9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82bd221438311b8ba34cfc26e43e590743b257ad91407befb8f4d2e17f7ba55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\
\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aacb2f8e3ec748166a587a5c2c7c026382073d9e3b8e20caf4dc444e9cc70c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19041ddafe3e2fe2bb95577c9bc5e372ba443790469deb4f36634d3690a61b0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d488533307095d5c0efc56c8f32bac388226d2d5763c5c31db03c84585899f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-acces
s-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02bc0b9bc6ed08d2162af49b31f5e66ef70f78723e4016f6be5905d1227201b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9275e2bb49afd9d4934b460729b9596662dc3b5b4efb5574cf88a845afd863f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9275e2bb49afd9d4934b460729b9596662dc3b5b4efb5574cf88a845afd863f7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T11:08:46Z\\\",\\\"message\\\":\\\"-manager/olm-operator-metrics]} name:Service_openshift-operator-lifecycle-manager/olm-operator-metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.168:8443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {63b1440a-0908-4cab-8799-012fa1cf0b07}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 11:08:46.188271 6432 ovnkube_controller.go:1292] Config duration recorder: kind/namespace/name service/openshift-cluster-machine-approver/machine-approver. OVN-Kubernetes controller took 0.13009727 seconds. No OVN measurement.\\\\nI1205 11:08:46.188282 6432 ovnkube_controller.go:1292] Config duration recorder: kind/namespace/name service/openshift-image-registry/image-registry-operator. OVN-Kubernetes controller took 0.129989518 seconds. 
No OVN measurement.\\\\nI1205 11:08:46.188337 6432 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1205 11:08:46.188423 6432 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1205 11:08:46.188488 6432 ovnkube.go:599] Stopped ovnkube\\\\nI1205 11:08:46.188516 6432 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1205 11:08:46.188575 6432 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:45Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-wchlf_openshift-ovn-kubernetes(1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9be8133cc8e8e518cd8a6a3ace93814e8d565d8bea2b81433ba07281ade2f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"
mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wchlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:06Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.620545 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.620594 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.620603 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.620615 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.620626 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:06Z","lastTransitionTime":"2025-12-05T11:09:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.623655 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zpkw4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ed53a3-7ee5-4d66-9e47-be49a9cd1b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71f9a74961573b2aafe3dce854836f51b027ee6abfb12bc2ff57be6524979165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqqjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zpkw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:06Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.633976 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2dq9w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"99a5c711-5c13-4615-93fc-9fbf02ce54ca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rq68g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rq68g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:20Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2dq9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:06Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.723569 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.723613 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.723631 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.723652 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.723668 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:06Z","lastTransitionTime":"2025-12-05T11:09:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.825781 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.825853 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.825869 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.825888 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.825903 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:06Z","lastTransitionTime":"2025-12-05T11:09:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.928723 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.928762 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.928770 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.928785 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:06 crc kubenswrapper[4728]: I1205 11:09:06.928830 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:06Z","lastTransitionTime":"2025-12-05T11:09:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 05 11:09:07 crc kubenswrapper[4728]: I1205 11:09:07.340614 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:09:07 crc kubenswrapper[4728]: I1205 11:09:07.340682 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:09:07 crc kubenswrapper[4728]: I1205 11:09:07.340700 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:09:07 crc kubenswrapper[4728]: I1205 11:09:07.340725 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:09:07 crc kubenswrapper[4728]: I1205 11:09:07.340743 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:07Z","lastTransitionTime":"2025-12-05T11:09:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:09:07 crc kubenswrapper[4728]: I1205 11:09:07.351833 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 11:09:07 crc kubenswrapper[4728]: E1205 11:09:07.351937 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 11:09:07 crc kubenswrapper[4728]: I1205 11:09:07.443405 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:09:07 crc kubenswrapper[4728]: I1205 11:09:07.443449 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:09:07 crc kubenswrapper[4728]: I1205 11:09:07.443457 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:09:07 crc kubenswrapper[4728]: I1205 11:09:07.443473 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:09:07 crc kubenswrapper[4728]: I1205 11:09:07.443482 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:07Z","lastTransitionTime":"2025-12-05T11:09:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:09:08 crc kubenswrapper[4728]: I1205 11:09:08.160321 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:09:08 crc kubenswrapper[4728]: I1205 11:09:08.160357 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:09:08 crc kubenswrapper[4728]: I1205 11:09:08.160366 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:09:08 crc kubenswrapper[4728]: I1205 11:09:08.160379 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:09:08 crc kubenswrapper[4728]: I1205 11:09:08.160389 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:08Z","lastTransitionTime":"2025-12-05T11:09:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:09:08 crc kubenswrapper[4728]: I1205 11:09:08.262159 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:09:08 crc kubenswrapper[4728]: I1205 11:09:08.262194 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:09:08 crc kubenswrapper[4728]: I1205 11:09:08.262205 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:09:08 crc kubenswrapper[4728]: I1205 11:09:08.262306 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:09:08 crc kubenswrapper[4728]: I1205 11:09:08.262345 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:08Z","lastTransitionTime":"2025-12-05T11:09:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:09:08 crc kubenswrapper[4728]: I1205 11:09:08.351425 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 11:09:08 crc kubenswrapper[4728]: I1205 11:09:08.351522 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 11:09:08 crc kubenswrapper[4728]: E1205 11:09:08.351635 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 11:09:08 crc kubenswrapper[4728]: I1205 11:09:08.351656 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2dq9w"
Dec 05 11:09:08 crc kubenswrapper[4728]: E1205 11:09:08.351774 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 11:09:08 crc kubenswrapper[4728]: E1205 11:09:08.351995 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2dq9w" podUID="99a5c711-5c13-4615-93fc-9fbf02ce54ca"
Dec 05 11:09:08 crc kubenswrapper[4728]: I1205 11:09:08.364499 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:09:08 crc kubenswrapper[4728]: I1205 11:09:08.364545 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:09:08 crc kubenswrapper[4728]: I1205 11:09:08.364557 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:09:08 crc kubenswrapper[4728]: I1205 11:09:08.364572 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:09:08 crc kubenswrapper[4728]: I1205 11:09:08.364583 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:08Z","lastTransitionTime":"2025-12-05T11:09:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:09:08 crc kubenswrapper[4728]: I1205 11:09:08.467188 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:09:08 crc kubenswrapper[4728]: I1205 11:09:08.467256 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:09:08 crc kubenswrapper[4728]: I1205 11:09:08.467277 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:09:08 crc kubenswrapper[4728]: I1205 11:09:08.467305 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:09:08 crc kubenswrapper[4728]: I1205 11:09:08.467326 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:08Z","lastTransitionTime":"2025-12-05T11:09:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:09:09 crc kubenswrapper[4728]: I1205 11:09:09.186307 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:09:09 crc kubenswrapper[4728]: I1205 11:09:09.186369 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:09:09 crc kubenswrapper[4728]: I1205 11:09:09.186389 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:09:09 crc kubenswrapper[4728]: I1205 11:09:09.186415 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:09:09 crc kubenswrapper[4728]: I1205 11:09:09.186435 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:09Z","lastTransitionTime":"2025-12-05T11:09:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:09:09 crc kubenswrapper[4728]: I1205 11:09:09.288777 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:09:09 crc kubenswrapper[4728]: I1205 11:09:09.288860 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:09:09 crc kubenswrapper[4728]: I1205 11:09:09.288869 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:09:09 crc kubenswrapper[4728]: I1205 11:09:09.288883 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:09:09 crc kubenswrapper[4728]: I1205 11:09:09.288895 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:09Z","lastTransitionTime":"2025-12-05T11:09:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:09:09 crc kubenswrapper[4728]: I1205 11:09:09.351955 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 11:09:09 crc kubenswrapper[4728]: E1205 11:09:09.352365 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 11:09:09 crc kubenswrapper[4728]: I1205 11:09:09.392051 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:09:09 crc kubenswrapper[4728]: I1205 11:09:09.392125 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:09:09 crc kubenswrapper[4728]: I1205 11:09:09.392145 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:09:09 crc kubenswrapper[4728]: I1205 11:09:09.392174 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:09:09 crc kubenswrapper[4728]: I1205 11:09:09.392191 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:09Z","lastTransitionTime":"2025-12-05T11:09:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:09:09 crc kubenswrapper[4728]: I1205 11:09:09.495257 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:09:09 crc kubenswrapper[4728]: I1205 11:09:09.495298 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:09:09 crc kubenswrapper[4728]: I1205 11:09:09.495308 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:09:09 crc kubenswrapper[4728]: I1205 11:09:09.495322 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:09:09 crc kubenswrapper[4728]: I1205 11:09:09.495330 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:09Z","lastTransitionTime":"2025-12-05T11:09:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:09:09 crc kubenswrapper[4728]: I1205 11:09:09.906918 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:09:09 crc kubenswrapper[4728]: I1205 11:09:09.906996 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:09:09 crc kubenswrapper[4728]: I1205 11:09:09.907020 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:09:09 crc kubenswrapper[4728]: I1205 11:09:09.907048 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:09:09 crc kubenswrapper[4728]: I1205 11:09:09.907064 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:09Z","lastTransitionTime":"2025-12-05T11:09:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:09:09 crc kubenswrapper[4728]: I1205 11:09:09.945830 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 11:09:09 crc kubenswrapper[4728]: I1205 11:09:09.946059 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 11:09:09 crc kubenswrapper[4728]: I1205 11:09:09.946104 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 11:09:09 crc kubenswrapper[4728]: E1205 11:09:09.946167 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:13.946128558 +0000 UTC m=+148.088251291 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 11:09:09 crc kubenswrapper[4728]: E1205 11:09:09.946247 4728 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Dec 05 11:09:09 crc kubenswrapper[4728]: E1205 11:09:09.946314 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 11:10:13.946299743 +0000 UTC m=+148.088422456 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Dec 05 11:09:09 crc kubenswrapper[4728]: E1205 11:09:09.946333 4728 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 05 11:09:09 crc kubenswrapper[4728]: E1205 11:09:09.946481 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 11:10:13.946452498 +0000 UTC m=+148.088575191 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.010248 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.010299 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.010309 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.010326 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.010337 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:10Z","lastTransitionTime":"2025-12-05T11:09:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.046919 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.046999 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 11:09:10 crc kubenswrapper[4728]: E1205 11:09:10.047179 4728 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 05 11:09:10 crc kubenswrapper[4728]: E1205 11:09:10.047199 4728 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 05 11:09:10 crc kubenswrapper[4728]: E1205 11:09:10.047212 4728 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 05 11:09:10 crc kubenswrapper[4728]: E1205 11:09:10.047253 4728 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 05 11:09:10 crc kubenswrapper[4728]: E1205 11:09:10.047323 4728 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 05 11:09:10 crc kubenswrapper[4728]: E1205 11:09:10.047353 4728 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 05 11:09:10 crc kubenswrapper[4728]: E1205 11:09:10.047276 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 11:10:14.047258568 +0000 UTC m=+148.189381251 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 05 11:09:10 crc kubenswrapper[4728]: E1205 11:09:10.047469 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 11:10:14.047429033 +0000 UTC m=+148.189551786 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.113151 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.113214 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.113233 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.113257 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.113276 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:10Z","lastTransitionTime":"2025-12-05T11:09:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.216508 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.216571 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.216585 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.216610 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.216625 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:10Z","lastTransitionTime":"2025-12-05T11:09:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.319573 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.319628 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.319640 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.319658 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.319670 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:10Z","lastTransitionTime":"2025-12-05T11:09:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.351210 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2dq9w"
Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.351326 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 11:09:10 crc kubenswrapper[4728]: E1205 11:09:10.351376 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2dq9w" podUID="99a5c711-5c13-4615-93fc-9fbf02ce54ca"
Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.351402 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 11:09:10 crc kubenswrapper[4728]: E1205 11:09:10.351540 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 11:09:10 crc kubenswrapper[4728]: E1205 11:09:10.351700 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.422062 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.422153 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.422187 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.422216 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.422237 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:10Z","lastTransitionTime":"2025-12-05T11:09:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.488271 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.488346 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.488378 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.488406 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.488427 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:10Z","lastTransitionTime":"2025-12-05T11:09:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:09:10 crc kubenswrapper[4728]: E1205 11:09:10.506574 4728 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:09:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:09:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:09:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:09:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:09:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:09:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:09:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:09:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"feb38e4d-326c-4c7a-a272-95e0ac54f009\\\",\\\"systemUUID\\\":\\\"65b68dc7-92a1-4fa1-bbc7-423a936860c6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:10Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.511378 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.511436 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.511453 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.511475 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.511489 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:10Z","lastTransitionTime":"2025-12-05T11:09:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:10 crc kubenswrapper[4728]: E1205 11:09:10.530023 4728 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:09:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:09:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:09:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:09:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:09:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:09:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:09:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:09:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"feb38e4d-326c-4c7a-a272-95e0ac54f009\\\",\\\"systemUUID\\\":\\\"65b68dc7-92a1-4fa1-bbc7-423a936860c6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:10Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.534198 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.534241 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.534250 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.534267 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.534278 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:10Z","lastTransitionTime":"2025-12-05T11:09:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:10 crc kubenswrapper[4728]: E1205 11:09:10.544563 4728 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:09:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:09:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:09:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:09:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:09:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:09:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:09:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:09:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"feb38e4d-326c-4c7a-a272-95e0ac54f009\\\",\\\"systemUUID\\\":\\\"65b68dc7-92a1-4fa1-bbc7-423a936860c6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:10Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.548772 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.548883 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.548909 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.548938 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.548961 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:10Z","lastTransitionTime":"2025-12-05T11:09:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:10 crc kubenswrapper[4728]: E1205 11:09:10.568097 4728 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:09:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:09:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:09:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:09:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:09:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:09:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:09:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:09:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"feb38e4d-326c-4c7a-a272-95e0ac54f009\\\",\\\"systemUUID\\\":\\\"65b68dc7-92a1-4fa1-bbc7-423a936860c6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:10Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.573902 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.573985 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.574014 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.574047 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.574067 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:10Z","lastTransitionTime":"2025-12-05T11:09:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:10 crc kubenswrapper[4728]: E1205 11:09:10.590631 4728 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:09:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:09:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:09:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:09:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:09:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:09:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:09:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T11:09:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"feb38e4d-326c-4c7a-a272-95e0ac54f009\\\",\\\"systemUUID\\\":\\\"65b68dc7-92a1-4fa1-bbc7-423a936860c6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:10Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:10 crc kubenswrapper[4728]: E1205 11:09:10.590757 4728 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.592353 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.592385 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.592396 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.592411 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.592422 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:10Z","lastTransitionTime":"2025-12-05T11:09:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.695492 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.695573 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.695597 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.695632 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.695656 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:10Z","lastTransitionTime":"2025-12-05T11:09:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.798480 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.798527 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.798539 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.798557 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.798571 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:10Z","lastTransitionTime":"2025-12-05T11:09:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.901119 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.901177 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.901190 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.901207 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:10 crc kubenswrapper[4728]: I1205 11:09:10.901397 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:10Z","lastTransitionTime":"2025-12-05T11:09:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:11 crc kubenswrapper[4728]: I1205 11:09:11.004694 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:11 crc kubenswrapper[4728]: I1205 11:09:11.004771 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:11 crc kubenswrapper[4728]: I1205 11:09:11.004858 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:11 crc kubenswrapper[4728]: I1205 11:09:11.004904 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:11 crc kubenswrapper[4728]: I1205 11:09:11.004928 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:11Z","lastTransitionTime":"2025-12-05T11:09:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:11 crc kubenswrapper[4728]: I1205 11:09:11.107083 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:11 crc kubenswrapper[4728]: I1205 11:09:11.107135 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:11 crc kubenswrapper[4728]: I1205 11:09:11.107150 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:11 crc kubenswrapper[4728]: I1205 11:09:11.107168 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:11 crc kubenswrapper[4728]: I1205 11:09:11.107181 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:11Z","lastTransitionTime":"2025-12-05T11:09:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:09:11 crc kubenswrapper[4728]: I1205 11:09:11.209510 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:11 crc kubenswrapper[4728]: I1205 11:09:11.209563 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:11 crc kubenswrapper[4728]: I1205 11:09:11.209574 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:11 crc kubenswrapper[4728]: I1205 11:09:11.209592 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:11 crc kubenswrapper[4728]: I1205 11:09:11.209604 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:11Z","lastTransitionTime":"2025-12-05T11:09:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:11 crc kubenswrapper[4728]: I1205 11:09:11.312217 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:11 crc kubenswrapper[4728]: I1205 11:09:11.312293 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:11 crc kubenswrapper[4728]: I1205 11:09:11.312327 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:11 crc kubenswrapper[4728]: I1205 11:09:11.312356 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:11 crc kubenswrapper[4728]: I1205 11:09:11.312377 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:11Z","lastTransitionTime":"2025-12-05T11:09:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:11 crc kubenswrapper[4728]: I1205 11:09:11.351606 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:09:11 crc kubenswrapper[4728]: E1205 11:09:11.351912 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 11:09:11 crc kubenswrapper[4728]: I1205 11:09:11.415152 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:11 crc kubenswrapper[4728]: I1205 11:09:11.415192 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:11 crc kubenswrapper[4728]: I1205 11:09:11.415203 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:11 crc kubenswrapper[4728]: I1205 11:09:11.415220 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:11 crc kubenswrapper[4728]: I1205 11:09:11.415231 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:11Z","lastTransitionTime":"2025-12-05T11:09:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:11 crc kubenswrapper[4728]: I1205 11:09:11.518302 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:11 crc kubenswrapper[4728]: I1205 11:09:11.518365 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:11 crc kubenswrapper[4728]: I1205 11:09:11.518389 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:11 crc kubenswrapper[4728]: I1205 11:09:11.518418 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:11 crc kubenswrapper[4728]: I1205 11:09:11.518443 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:11Z","lastTransitionTime":"2025-12-05T11:09:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:09:11 crc kubenswrapper[4728]: I1205 11:09:11.620680 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:11 crc kubenswrapper[4728]: I1205 11:09:11.620727 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:11 crc kubenswrapper[4728]: I1205 11:09:11.620740 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:11 crc kubenswrapper[4728]: I1205 11:09:11.620757 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:11 crc kubenswrapper[4728]: I1205 11:09:11.620770 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:11Z","lastTransitionTime":"2025-12-05T11:09:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:11 crc kubenswrapper[4728]: I1205 11:09:11.723527 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:11 crc kubenswrapper[4728]: I1205 11:09:11.723564 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:11 crc kubenswrapper[4728]: I1205 11:09:11.723577 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:11 crc kubenswrapper[4728]: I1205 11:09:11.723594 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:11 crc kubenswrapper[4728]: I1205 11:09:11.723606 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:11Z","lastTransitionTime":"2025-12-05T11:09:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:11 crc kubenswrapper[4728]: I1205 11:09:11.827087 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:11 crc kubenswrapper[4728]: I1205 11:09:11.827146 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:11 crc kubenswrapper[4728]: I1205 11:09:11.827175 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:11 crc kubenswrapper[4728]: I1205 11:09:11.827200 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:11 crc kubenswrapper[4728]: I1205 11:09:11.827216 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:11Z","lastTransitionTime":"2025-12-05T11:09:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:09:11 crc kubenswrapper[4728]: I1205 11:09:11.930245 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:11 crc kubenswrapper[4728]: I1205 11:09:11.930324 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:11 crc kubenswrapper[4728]: I1205 11:09:11.930345 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:11 crc kubenswrapper[4728]: I1205 11:09:11.930369 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:11 crc kubenswrapper[4728]: I1205 11:09:11.930387 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:11Z","lastTransitionTime":"2025-12-05T11:09:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:12 crc kubenswrapper[4728]: I1205 11:09:12.033321 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:12 crc kubenswrapper[4728]: I1205 11:09:12.033359 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:12 crc kubenswrapper[4728]: I1205 11:09:12.033374 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:12 crc kubenswrapper[4728]: I1205 11:09:12.033390 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:12 crc kubenswrapper[4728]: I1205 11:09:12.033401 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:12Z","lastTransitionTime":"2025-12-05T11:09:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:12 crc kubenswrapper[4728]: I1205 11:09:12.136528 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:12 crc kubenswrapper[4728]: I1205 11:09:12.136570 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:12 crc kubenswrapper[4728]: I1205 11:09:12.136580 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:12 crc kubenswrapper[4728]: I1205 11:09:12.136595 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:12 crc kubenswrapper[4728]: I1205 11:09:12.136606 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:12Z","lastTransitionTime":"2025-12-05T11:09:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:09:12 crc kubenswrapper[4728]: I1205 11:09:12.239558 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:12 crc kubenswrapper[4728]: I1205 11:09:12.239640 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:12 crc kubenswrapper[4728]: I1205 11:09:12.239654 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:12 crc kubenswrapper[4728]: I1205 11:09:12.239669 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:12 crc kubenswrapper[4728]: I1205 11:09:12.239679 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:12Z","lastTransitionTime":"2025-12-05T11:09:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:12 crc kubenswrapper[4728]: I1205 11:09:12.342253 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:12 crc kubenswrapper[4728]: I1205 11:09:12.342322 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:12 crc kubenswrapper[4728]: I1205 11:09:12.342338 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:12 crc kubenswrapper[4728]: I1205 11:09:12.342361 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:12 crc kubenswrapper[4728]: I1205 11:09:12.342377 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:12Z","lastTransitionTime":"2025-12-05T11:09:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:12 crc kubenswrapper[4728]: I1205 11:09:12.351108 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2dq9w" Dec 05 11:09:12 crc kubenswrapper[4728]: I1205 11:09:12.351129 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:09:12 crc kubenswrapper[4728]: I1205 11:09:12.351613 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:09:12 crc kubenswrapper[4728]: E1205 11:09:12.351707 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 11:09:12 crc kubenswrapper[4728]: E1205 11:09:12.351606 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2dq9w" podUID="99a5c711-5c13-4615-93fc-9fbf02ce54ca" Dec 05 11:09:12 crc kubenswrapper[4728]: E1205 11:09:12.351842 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 11:09:12 crc kubenswrapper[4728]: I1205 11:09:12.351936 4728 scope.go:117] "RemoveContainer" containerID="9275e2bb49afd9d4934b460729b9596662dc3b5b4efb5574cf88a845afd863f7" Dec 05 11:09:12 crc kubenswrapper[4728]: I1205 11:09:12.446492 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:12 crc kubenswrapper[4728]: I1205 11:09:12.446897 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:12 crc kubenswrapper[4728]: I1205 11:09:12.446912 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:12 crc kubenswrapper[4728]: I1205 11:09:12.446932 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:12 crc kubenswrapper[4728]: I1205 11:09:12.446944 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:12Z","lastTransitionTime":"2025-12-05T11:09:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:09:12 crc kubenswrapper[4728]: I1205 11:09:12.548625 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:12 crc kubenswrapper[4728]: I1205 11:09:12.548663 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:12 crc kubenswrapper[4728]: I1205 11:09:12.548673 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:12 crc kubenswrapper[4728]: I1205 11:09:12.548688 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:12 crc kubenswrapper[4728]: I1205 11:09:12.548699 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:12Z","lastTransitionTime":"2025-12-05T11:09:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:12 crc kubenswrapper[4728]: I1205 11:09:12.651306 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:12 crc kubenswrapper[4728]: I1205 11:09:12.651347 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:12 crc kubenswrapper[4728]: I1205 11:09:12.651356 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:12 crc kubenswrapper[4728]: I1205 11:09:12.651398 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:12 crc kubenswrapper[4728]: I1205 11:09:12.651410 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:12Z","lastTransitionTime":"2025-12-05T11:09:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:12 crc kubenswrapper[4728]: I1205 11:09:12.754213 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:12 crc kubenswrapper[4728]: I1205 11:09:12.754261 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:12 crc kubenswrapper[4728]: I1205 11:09:12.754274 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:12 crc kubenswrapper[4728]: I1205 11:09:12.754291 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:12 crc kubenswrapper[4728]: I1205 11:09:12.754301 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:12Z","lastTransitionTime":"2025-12-05T11:09:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:09:12 crc kubenswrapper[4728]: I1205 11:09:12.856383 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:12 crc kubenswrapper[4728]: I1205 11:09:12.856436 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:12 crc kubenswrapper[4728]: I1205 11:09:12.856448 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:12 crc kubenswrapper[4728]: I1205 11:09:12.856464 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:12 crc kubenswrapper[4728]: I1205 11:09:12.856474 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:12Z","lastTransitionTime":"2025-12-05T11:09:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:12 crc kubenswrapper[4728]: I1205 11:09:12.959264 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:12 crc kubenswrapper[4728]: I1205 11:09:12.959302 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:12 crc kubenswrapper[4728]: I1205 11:09:12.959313 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:12 crc kubenswrapper[4728]: I1205 11:09:12.959329 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:12 crc kubenswrapper[4728]: I1205 11:09:12.959341 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:12Z","lastTransitionTime":"2025-12-05T11:09:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.046573 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-wchlf_1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5/ovnkube-controller/2.log" Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.049234 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" event={"ID":"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5","Type":"ContainerStarted","Data":"3661d4a688940abf6aa1e25bffdada007e91fa298063659adea831f4432ba21f"} Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.049869 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.061740 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.061785 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.061824 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.061845 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.061863 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:13Z","lastTransitionTime":"2025-12-05T11:09:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.070851 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"600a7698-d6af-4d3a-997c-2af7492b676c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4e303bada68d0a42afcecbe481dd6199dd2e545598d65614cee8ce1097fa3da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e0e4eacda19e748d0d3f8e1d76b38c186c2df0f81a171387de0d9016a76c719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c6351e5a4a9441b43ba3145aa818599799ad7fcdd6bf62b5d0ec08eb3a9d5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c60e02416d658dfb8deecf915952febb04b650126edae08c8bf522c50524c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://489eff9110c6e48b0e3119edf09adf0a2a48fc1fdc1bb8e81f113ab882725eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:13Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.086987 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90beec388d3137d02789bb6be800fbb0b1678f8799f133d1e48b1f636f70e8c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://780e71361f1592490b1bbc77b9a869204d3a20319d1bd6c8330d3b0fc81d197f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:13Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.103657 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:13Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.115786 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:13Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.130865 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4abe5ae7-e1e1-4771-bb01-4e62b10e074b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe411cb87a46bdaf8208ebd4162ff18f2f9a2617ca43eb635793364573eb3ea4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ca6321022609b3cd60a9e1a534cfd3e69d3f520e5252ed4ffa8f76be4af91e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1
220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ccc55c28cdfadaa191738047a8d7223cf102d79a9af4d225562a87c42cf1c8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1665078c543b335af45e2ef37840dff719d513125a747f8c07dc74ef97e07354\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:13Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.143609 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6b1dbb0-8a99-4b3b-870e-771cdaac1bac\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://adb81cae56872a55c5781abe457653330ce2284251d64366ece5f2f05055b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ab41915fb3de0296bc2ba49fe39835620c1ed216790add23ccc2c23ac718c2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://920da764fdc49312fdf906f31a1280028d5762b4b7af2b3806c9488e3dfa9d71\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd145f7ac78fc008f9b9b24a11f51d6afc11e3cfa047b8d178d11507ddde8e77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b05c55e24f53fc91a2fd2b2a34b000751799933c3cb56da1ea666a6bfb6ba13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T11:08:05Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 11:07:59.827917 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 11:07:59.829563 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2883091425/tls.crt::/tmp/serving-cert-2883091425/tls.key\\\\\\\"\\\\nI1205 11:08:05.319025 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 11:08:05.321746 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 11:08:05.321774 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 11:08:05.321847 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 11:08:05.321860 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 11:08:05.327281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 11:08:05.327310 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 11:08:05.327326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 11:08:05.327329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 11:08:05.327332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 11:08:05.327493 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 11:08:05.328771 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af8363ba45c19a07e7d11064c2e321ec28b939288d01eb82d8bd9e1ee6b93998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:13Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.155578 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://098ed82d50e963f31d1176e06249004a4cc94258aab5dabf6628dead209ae3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:13Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.164166 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.164205 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.164218 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.164237 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.164250 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:13Z","lastTransitionTime":"2025-12-05T11:09:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.172111 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gf8np" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f292da29-a632-47aa-8bcc-2d999eaa6c11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://441e36947749174097676ff62fe93aff6b9689d29fd12af2c1604adcceffa5d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d525f104a98170107ecc1ae96c4f5c5ff3dda2f976336c219e5a9498725380e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T11:08:52Z\\\",\\\"message\\\":\\\"2025-12-05T11:08:07+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_f396c510-587b-403a-9a9d-1817df435c2b\\\\n2025-12-05T11:08:07+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_f396c510-587b-403a-9a9d-1817df435c2b to /host/opt/cni/bin/\\\\n2025-12-05T11:08:07Z [verbose] multus-daemon started\\\\n2025-12-05T11:08:07Z [verbose] Readiness Indicator file check\\\\n2025-12-05T11:08:52Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dxmch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gf8np\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:13Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.185196 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bfa60b-fcb6-4519-abc5-c25fea50921d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d952894ef1ae2b5d6c23b0d1b85d9d2689e062674a2e4ba53719b6c7290bbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41cccfccd3b8763566f9a4453832461a74aedcf7bcc18e8c925bb20e1b620de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w8qlp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:13Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.209729 4728 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8pwbb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e18c7d32-4ecb-4931-931e-56a7898cb233\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d53da0e296e545d13941882c9568c12f967a274b906dafb14c0905f558a155c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f79805a42a1054302447bc55dc07df80cf19e651cd1c3e783a4614ff49f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f79805a42a1054302447bc55dc07df80cf19e651cd1c3e783a4614ff49f9f624\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b953abf0e1905c143f473ea9d09341d7602168ff1350c317346bef8dda9402f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b953abf0e1905c143f473ea9d09341d7602168ff1350c317346bef8dda9402f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7627aff65d1bc4a39148f098ac2e1676d41c980ad3dd2d9494f4bee7b4c0872\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7627aff65d1bc4a39148f098ac2e1676d41c980ad3dd2d9494f4bee7b4c0872\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8pwbb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:13Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.228615 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-n2qgk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f26e1600-9bcd-497a-b875-7eaed5b6fba8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b47033c6c217ce37f789d031e18e21eeb86ba85f574c6455e15c79a25bfeaecc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fqbq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6b426a3d72e8498cd10b4085d89ffa51beaa52d2dbda7c70c96769e32a20d27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fqbq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-n2qgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:13Z is after 2025-08-24T17:21:41Z" Dec 05 
11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.244402 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2dq9w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"99a5c711-5c13-4615-93fc-9fbf02ce54ca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rq68g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rq68g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:20Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2dq9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:13Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.260589 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d14e1e8-ec42-4422-b988-d92b82edfcc6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3383601ba7eb5f9957d287898dee5f9fec9cfef59b609b1e525a0e0564d7e86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c4913667ba97a55961e836efe1c65c28881ae9f356084d3c712342aa85b9301\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f74f65892b7a9db35105f91763d8b150e62b86dae595e7d6208d47355d51bed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20c7713b547195d59fc70a00f6f6de511936ce6f38c6a4bcf89db4880ceebae6\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20c7713b547195d59fc70a00f6f6de511936ce6f38c6a4bcf89db4880ceebae6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:13Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.266235 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.266263 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.266272 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.266285 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.266295 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:13Z","lastTransitionTime":"2025-12-05T11:09:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.274050 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e7e94e2-79a4-4d0f-b8cb-b31827c3dc44\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f927842bae27f6ef715fcbfdbbb0dbb79e59e4706e28bffd6331140f8a66d7f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5d44790495a71b1a699a16a635f5722e5a9ab589f6ad8fd46f6ffc536b56815\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5d44790495a71b1a699a16a635f5722e5a9ab589f6ad8fd46f6ffc536b56815\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:13Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.284660 4728 
status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-85f5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f536e7a4-ad53-442e-b7c3-8928fcd89f22\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://180e166e57042a6843516cf424020e8aab91131f7e0cc4c6f061de5e46bb0f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r72s7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-85f5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:13Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.294410 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:13Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.304968 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec1c7fc1e4ad2be5a7dfb1eeaa3def2a68d041e302643e875869f6c657c5f6d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:13Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.322561 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62ccfb6861d12cec3082bfe61fc1976dc0889398539797aa62979b632e74c9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82bd221438311b8ba34cfc26e43e590743b257ad91407befb8f4d2e17f7ba55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\
\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aacb2f8e3ec748166a587a5c2c7c026382073d9e3b8e20caf4dc444e9cc70c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19041ddafe3e2fe2bb95577c9bc5e372ba443790469deb4f36634d3690a61b0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d488533307095d5c0efc56c8f32bac388226d2d5763c5c31db03c84585899f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-acces
s-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02bc0b9bc6ed08d2162af49b31f5e66ef70f78723e4016f6be5905d1227201b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3661d4a688940abf6aa1e25bffdada007e91fa298063659adea831f4432ba21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9275e2bb49afd9d4934b460729b9596662dc3b5b4efb5574cf88a845afd863f7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T11:08:46Z\\\",\\\"message\\\":\\\"-manager/olm-operator-metrics]} name:Service_openshift-operator-lifecycle-manager/olm-operator-metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.168:8443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {63b1440a-0908-4cab-8799-012fa1cf0b07}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 11:08:46.188271 6432 ovnkube_controller.go:1292] Config duration recorder: kind/namespace/name service/openshift-cluster-machine-approver/machine-approver. OVN-Kubernetes controller took 0.13009727 seconds. No OVN measurement.\\\\nI1205 11:08:46.188282 6432 ovnkube_controller.go:1292] Config duration recorder: kind/namespace/name service/openshift-image-registry/image-registry-operator. OVN-Kubernetes controller took 0.129989518 seconds. 
No OVN measurement.\\\\nI1205 11:08:46.188337 6432 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1205 11:08:46.188423 6432 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1205 11:08:46.188488 6432 ovnkube.go:599] Stopped ovnkube\\\\nI1205 11:08:46.188516 6432 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1205 11:08:46.188575 6432 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:45Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9be8133cc8e8e518cd8a6a3ace93814e8d565d8bea2b81433ba07281ade2f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var
/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wchlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:13Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.331616 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zpkw4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ed53a3-7ee5-4d66-9e47-be49a9cd1b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71f9a74961573b2aafe3dce854836f51b027ee6abfb12bc2ff57be6524979165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqqjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zpkw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:13Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.351882 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:09:13 crc kubenswrapper[4728]: E1205 11:09:13.351984 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.368632 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.368668 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.368678 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.368691 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.368702 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:13Z","lastTransitionTime":"2025-12-05T11:09:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.472244 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.472314 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.472331 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.472358 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.472375 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:13Z","lastTransitionTime":"2025-12-05T11:09:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.579411 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.579451 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.579459 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.579472 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.579481 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:13Z","lastTransitionTime":"2025-12-05T11:09:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.681961 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.682035 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.682073 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.682106 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.682186 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:13Z","lastTransitionTime":"2025-12-05T11:09:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.784863 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.784917 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.784976 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.785008 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.785037 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:13Z","lastTransitionTime":"2025-12-05T11:09:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
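The entries above (and below) all report the same root condition: the container runtime finds no CNI config file under /etc/kubernetes/cni/net.d/, so the kubelet keeps flipping the node to NotReady on every sync; the config is presumably written there by the OVN-Kubernetes node pod once it is up, which it cannot be while ovnkube-controller crash-loops. A minimal sketch, assuming nothing beyond the Go standard library, of the kind of directory probe behind the NetworkPluginNotReady message — not the kubelet's or CRI-O's actual code:

```go
// Sketch only: mimic the "is there any CNI network config yet?" probe that
// produces the repeated NetworkPluginNotReady message in this log.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// cniConfigPresent reports whether dir contains anything that looks like a
// CNI config (.conf, .conflist, or .json).
func cniConfigPresent(dir string) (bool, error) {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return false, err
	}
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			return true, nil
		}
	}
	return false, nil
}

func main() {
	ok, err := cniConfigPresent("/etc/kubernetes/cni/net.d")
	if err != nil {
		fmt.Println("cannot read CNI config dir:", err)
		return
	}
	if !ok {
		// The condition behind "no CNI configuration file in
		// /etc/kubernetes/cni/net.d/. Has your network provider started?"
		fmt.Println("network not ready: no CNI configuration file found")
	}
}
```

Once a conflist appears in that directory the probe succeeds and the Ready condition stops oscillating, which is why the spam below continues only while ovnkube-node is down.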
Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.887729 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.887769 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.887778 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.887810 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.887822 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:13Z","lastTransitionTime":"2025-12-05T11:09:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.990849 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.990920 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.990939 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.990962 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 11:09:13 crc kubenswrapper[4728]: I1205 11:09:13.990978 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:13Z","lastTransitionTime":"2025-12-05T11:09:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
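Every "Failed to update status for pod" entry in this log, including the etcd one that follows, dies against the same endpoint: the pod.network-node-identity.openshift.io webhook at https://127.0.0.1:9743/pod serves a certificate whose NotAfter is 2025-08-24T17:21:41Z, months before the node's clock (2025-12-05), so the TLS handshake fails before the patch is ever sent. A minimal sketch of the validity-window comparison that produces exactly this x509 error text, assuming a hypothetical webhook-cert.pem on disk — in reality the check runs inside Go's TLS stack during certificate verification:

```go
// Sketch only: reproduce the "x509: certificate has expired or is not yet
// valid" decision against a PEM-encoded certificate.
package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
	"time"
)

func main() {
	pemBytes, err := os.ReadFile("webhook-cert.pem") // hypothetical path
	if err != nil {
		panic(err)
	}
	block, _ := pem.Decode(pemBytes)
	if block == nil {
		panic("no PEM block found")
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		panic(err)
	}
	now := time.Now().UTC()
	switch {
	case now.After(cert.NotAfter):
		// Matches the log: "current time ... is after 2025-08-24T17:21:41Z".
		fmt.Printf("certificate has expired: current time %s is after %s\n",
			now.Format(time.RFC3339), cert.NotAfter.Format(time.RFC3339))
	case now.Before(cert.NotBefore):
		fmt.Printf("certificate is not yet valid: current time %s is before %s\n",
			now.Format(time.RFC3339), cert.NotBefore.Format(time.RFC3339))
	default:
		fmt.Println("certificate is within its validity window")
	}
}
```

Until that serving certificate is rotated (or the node clock is corrected), every status patch below will keep failing with the identical message, regardless of which pod is being updated.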
Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.054055 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-wchlf_1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5/ovnkube-controller/3.log"
Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.054914 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-wchlf_1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5/ovnkube-controller/2.log"
Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.057716 4728 generic.go:334] "Generic (PLEG): container finished" podID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" containerID="3661d4a688940abf6aa1e25bffdada007e91fa298063659adea831f4432ba21f" exitCode=1
Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.057767 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" event={"ID":"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5","Type":"ContainerDied","Data":"3661d4a688940abf6aa1e25bffdada007e91fa298063659adea831f4432ba21f"}
Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.057831 4728 scope.go:117] "RemoveContainer" containerID="9275e2bb49afd9d4934b460729b9596662dc3b5b4efb5574cf88a845afd863f7"
Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.058978 4728 scope.go:117] "RemoveContainer" containerID="3661d4a688940abf6aa1e25bffdada007e91fa298063659adea831f4432ba21f"
Dec 05 11:09:14 crc kubenswrapper[4728]: E1205 11:09:14.059246 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-wchlf_openshift-ovn-kubernetes(1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5)\"" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" podUID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5"
Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.080634 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"600a7698-d6af-4d3a-997c-2af7492b676c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4e303bada68d0a42afcecbe481dd6199dd2e545598d65614cee8ce1097fa3da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e0e4eacda19e748d0d3f8e1d76b38c186c2df0f81a171387de0d9016a76c719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c6351e5a4a9441b43ba3145aa818599799ad7fcdd6bf62b5d0ec08eb3a9d5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c60e02416d658dfb8deecf915952febb04b650
126edae08c8bf522c50524c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://489eff9110c6e48b0e3119edf09adf0a2a48fc1fdc1bb8e81f113ab882725eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a3a0b726556ab31229fc604b848b63cf812b3720f3b4f594129ddd668807538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3159e0de68b3d0b8641b256a36394f8ffe6c5d020237202e030cfb0292972a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3866d861a4ac7038ed2d8a76bbc880882d38f7d7bb177af7bb318dcaae03b164\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:14Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.093450 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.093495 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.093508 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.093525 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.093538 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:14Z","lastTransitionTime":"2025-12-05T11:09:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.095028 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90beec388d3137d02789bb6be800fbb0b1678f8799f133d1e48b1f636f70e8c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://780e71361f1592490b1bbc77b9a869204d3a20319d1bd6c8330d3b0fc81d197f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:14Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.112441 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:14Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.127625 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:14Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.142247 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-n2qgk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f26e1600-9bcd-497a-b875-7eaed5b6fba8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b47033c6c217ce37f789d031e18e21eeb86ba85f574c6455e15c79a25bfeaecc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fqbq5\\\",\\\"readOnly\\\":true,\\
\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6b426a3d72e8498cd10b4085d89ffa51beaa52d2dbda7c70c96769e32a20d27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fqbq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-n2qgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:14Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.159532 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4abe5ae7-e1e1-4771-bb01-4e62b10e074b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe411cb87a46bdaf8208ebd4162ff18f2f9a2617ca43eb635793364573eb3ea4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ca6321022609b3cd60a9e1a534cfd3e69d3f520e5252ed4ffa8f76be4af91e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ccc55c28cdfadaa191738047a8d7223cf102d79a9af4d225562a87c42cf1c8a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1665078c543b335af45e2ef37840dff719d513125a747f8c07dc74ef97e07354\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:14Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.183886 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6b1dbb0-8a99-4b3b-870e-771cdaac1bac\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://adb81cae56872a55c5781abe457653330ce2284251d64366ece5f2f05055b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ab41915fb3de0296bc2ba49fe39835620c1ed216790add23ccc2c23ac718c2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c
987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://920da764fdc49312fdf906f31a1280028d5762b4b7af2b3806c9488e3dfa9d71\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd145f7ac78fc008f9b9b24a11f51d6afc11e3cfa047b8d178d11507ddde8e77\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b05c55e24f53fc91a2fd2b2a34b000751799933c3cb56da1ea666a6bfb6ba13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T11:08:05Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 11:07:59.827917 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 11:07:59.829563 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2883091425/tls.crt::/tmp/serving-cert-2883091425/tls.key\\\\\\\"\\\\nI1205 11:08:05.319025 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 11:08:05.321746 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 11:08:05.321774 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 11:08:05.321847 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 11:08:05.321860 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 11:08:05.327281 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 11:08:05.327310 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 11:08:05.327322 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 11:08:05.327326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 11:08:05.327329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 11:08:05.327332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 11:08:05.327493 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 11:08:05.328771 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af8363ba45c19a07e7d11064c2e321ec28b939288d01eb82d8bd9e1ee6b93998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:14Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.196611 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.196665 
4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.196683 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.196707 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.196726 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:14Z","lastTransitionTime":"2025-12-05T11:09:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.201398 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://098ed82d50e963f31d1176e06249004a4cc94258aab5dabf6628dead209ae3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:14Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.220761 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gf8np" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f292da29-a632-47aa-8bcc-2d999eaa6c11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://441e36947749174097676ff62fe93aff6b9689d29fd12af2c1604adcceffa5d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d525f104a98170107ecc1ae96c4f5c5ff3dda2f976336c219e5a9498725380e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T11:08:52Z\\\",\\\"message\\\":\\\"2025-12-05T11:08:07+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_f396c510-587b-403a-9a9d-1817df435c2b\\\\n2025-12-05T11:08:07+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_f396c510-587b-403a-9a9d-1817df435c2b to /host/opt/cni/bin/\\\\n2025-12-05T11:08:07Z [verbose] multus-daemon started\\\\n2025-12-05T11:08:07Z [verbose] Readiness Indicator file check\\\\n2025-12-05T11:08:52Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dxmch\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gf8np\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:14Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.235400 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bfa60b-fcb6-4519-abc5-c25fea50921d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d952894ef1ae2b5d6c23b0d1b85d9d2689e062674a2e4ba53719b6c7290bbaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41cccfccd3b8763566f9a4453832461a74aedcf7bcc18e8c925bb20e1b620de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6r7pf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w8qlp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:14Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.254116 4728 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8pwbb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e18c7d32-4ecb-4931-931e-56a7898cb233\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d53da0e296e545d13941882c9568c12f967a274b906dafb14c0905f558a155c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc920344d3980c803873253003fd9befe2e4c1edc9e02866e0d70461834b238e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7a9467cf5e9e68ef04651b47cbc0ed331398c0f113ac9c6bd76ae519edaa297\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://457b83f3581701174b7bae2a8d31cca1ffd9396319ee558814cc156f42f03101\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f79805a42a1054302447bc55dc07df80cf19e651cd1c3e783a4614ff49f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f79805a42a1054302447bc55dc07df80cf19e651cd1c3e783a4614ff49f9f624\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b953abf0e1905c143f473ea9d09341d7602168ff1350c317346bef8dda9402f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b953abf0e1905c143f473ea9d09341d7602168ff1350c317346bef8dda9402f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7627aff65d1bc4a39148f098ac2e1676d41c980ad3dd2d9494f4bee7b4c0872\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7627aff65d1bc4a39148f098ac2e1676d41c980ad3dd2d9494f4bee7b4c0872\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bq4mx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8pwbb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:14Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.268034 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zpkw4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ed53a3-7ee5-4d66-9e47-be49a9cd1b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71f9a74961573b2aafe3dce854836f51b027ee6abfb12bc2ff57be6524979165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqqjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:09Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zpkw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:14Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.278241 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2dq9w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"99a5c711-5c13-4615-93fc-9fbf02ce54ca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rq68g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rq68g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:20Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2dq9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:14Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.291926 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d14e1e8-ec42-4422-b988-d92b82edfcc6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3383601ba7eb5f9957d287898dee5f9fec9cfef59b609b1e525a0e0564d7e86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c4913667ba97a55961e836efe1c65c28881ae9f356084d3c712342aa85b9301\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f74f65892b7a9db35105f91763d8b150e62b86dae595e7d6208d47355d51bed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20c7713b547195d59fc70a00f6f6de511936ce6f38c6a4bcf89db4880ceebae6\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20c7713b547195d59fc70a00f6f6de511936ce6f38c6a4bcf89db4880ceebae6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:14Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.298952 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.299037 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.299054 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.299072 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.299086 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:14Z","lastTransitionTime":"2025-12-05T11:09:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.304882 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e7e94e2-79a4-4d0f-b8cb-b31827c3dc44\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f927842bae27f6ef715fcbfdbbb0dbb79e59e4706e28bffd6331140f8a66d7f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5d44790495a71b1a699a16a635f5722e5a9ab589f6ad8fd46f6ffc536b56815\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5d44790495a71b1a699a16a635f5722e5a9ab589f6ad8fd46f6ffc536b56815\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:07:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:14Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.314650 4728 
status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-85f5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f536e7a4-ad53-442e-b7c3-8928fcd89f22\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://180e166e57042a6843516cf424020e8aab91131f7e0cc4c6f061de5e46bb0f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r72s7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:06Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-85f5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:14Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.324757 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:14Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.339155 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec1c7fc1e4ad2be5a7dfb1eeaa3def2a68d041e302643e875869f6c657c5f6d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:14Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.351251 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.351286 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2dq9w" Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.351270 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:09:14 crc kubenswrapper[4728]: E1205 11:09:14.351516 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 11:09:14 crc kubenswrapper[4728]: E1205 11:09:14.351609 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 11:09:14 crc kubenswrapper[4728]: E1205 11:09:14.351960 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-2dq9w" podUID="99a5c711-5c13-4615-93fc-9fbf02ce54ca" Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.369083 4728 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62ccfb6861d12cec3082bfe61fc1976dc0889398539797aa62979b632e74c9ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82bd221438311b8ba34cfc26e43e590743b257ad91407befb8f4d2e17f7ba55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"
recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aacb2f8e3ec748166a587a5c2c7c026382073d9e3b8e20caf4dc444e9cc70c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19041ddafe3e2fe2bb95577c9bc5e372ba443790469deb4f36634d3690a61b0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d488533307095d5c0efc56c8f32bac388226d2d5763c5c31db03c84585899f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02bc0b9bc6ed08d2162af49b31f5e66ef70f78723e4016f6be5905d1227201b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257
453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3661d4a688940abf6aa1e25bffdada007e91fa298063659adea831f4432ba21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9275e2bb49afd9d4934b460729b9596662dc3b5b4efb5574cf88a845afd863f7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T11:08:46Z\\\",\\\"message\\\":\\\"-manager/olm-operator-metrics]} name:Service_openshift-operator-lifecycle-manager/olm-operator-metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.168:8443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {63b1440a-0908-4cab-8799-012fa1cf0b07}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 11:08:46.188271 6432 ovnkube_controller.go:1292] Config duration recorder: kind/namespace/name service/openshift-cluster-machine-approver/machine-approver. OVN-Kubernetes controller took 0.13009727 seconds. No OVN measurement.\\\\nI1205 11:08:46.188282 6432 ovnkube_controller.go:1292] Config duration recorder: kind/namespace/name service/openshift-image-registry/image-registry-operator. OVN-Kubernetes controller took 0.129989518 seconds. 
No OVN measurement.\\\\nI1205 11:08:46.188337 6432 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1205 11:08:46.188423 6432 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1205 11:08:46.188488 6432 ovnkube.go:599] Stopped ovnkube\\\\nI1205 11:08:46.188516 6432 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1205 11:08:46.188575 6432 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:45Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3661d4a688940abf6aa1e25bffdada007e91fa298063659adea831f4432ba21f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T11:09:13Z\\\",\\\"message\\\":\\\"rt default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:13Z is after 2025-08-24T17:21:41Z]\\\\nI1205 11:09:13.215786 6845 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-etcd-operator/metrics]} name:Service_openshift-etcd-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.188:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {53c717ca-2174-4315-bb03-c937a9c0d9b6}] Until: Durable:\\\\u003cnil\\\\u003e 
Comment:\\\\u003c\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T11:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9be8133cc8e8e518cd8a6a3ace93814e8d565d8bea2b81433ba07281ade2f3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T11:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f
36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T11:08:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T11:08:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nv6h7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T11:08:07Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wchlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T11:09:14Z is after 2025-08-24T17:21:41Z" Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.401994 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.402032 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.402043 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.402064 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.402077 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:14Z","lastTransitionTime":"2025-12-05T11:09:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.504718 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.504762 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.504774 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.504807 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.504818 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:14Z","lastTransitionTime":"2025-12-05T11:09:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.607164 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.607236 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.607259 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.607284 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.607316 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:14Z","lastTransitionTime":"2025-12-05T11:09:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.710502 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.710561 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.710585 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.710613 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.710630 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:14Z","lastTransitionTime":"2025-12-05T11:09:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.813876 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.813956 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.813980 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.814009 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.814030 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:14Z","lastTransitionTime":"2025-12-05T11:09:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.916636 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.916680 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.916692 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.916706 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:14 crc kubenswrapper[4728]: I1205 11:09:14.916717 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:14Z","lastTransitionTime":"2025-12-05T11:09:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.019672 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.019722 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.019738 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.019754 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.019765 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:15Z","lastTransitionTime":"2025-12-05T11:09:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.066740 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-wchlf_1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5/ovnkube-controller/3.log" Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.070720 4728 scope.go:117] "RemoveContainer" containerID="3661d4a688940abf6aa1e25bffdada007e91fa298063659adea831f4432ba21f" Dec 05 11:09:15 crc kubenswrapper[4728]: E1205 11:09:15.070943 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-wchlf_openshift-ovn-kubernetes(1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5)\"" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" podUID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.093363 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-n2qgk" podStartSLOduration=69.093348629 podStartE2EDuration="1m9.093348629s" podCreationTimestamp="2025-12-05 11:08:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:09:15.093193775 +0000 UTC m=+89.235316478" watchObservedRunningTime="2025-12-05 11:09:15.093348629 +0000 UTC m=+89.235471312" Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.122189 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.122243 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.122254 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.122276 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.122290 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:15Z","lastTransitionTime":"2025-12-05T11:09:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.126494 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=69.126471828 podStartE2EDuration="1m9.126471828s" podCreationTimestamp="2025-12-05 11:08:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:09:15.126247401 +0000 UTC m=+89.268370094" watchObservedRunningTime="2025-12-05 11:09:15.126471828 +0000 UTC m=+89.268594521" Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.126904 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=63.126895481 podStartE2EDuration="1m3.126895481s" podCreationTimestamp="2025-12-05 11:08:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:09:15.109928169 +0000 UTC m=+89.252050892" watchObservedRunningTime="2025-12-05 11:09:15.126895481 +0000 UTC m=+89.269018174" Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.154866 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-gf8np" podStartSLOduration=69.154838084 podStartE2EDuration="1m9.154838084s" podCreationTimestamp="2025-12-05 11:08:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:09:15.154638818 +0000 UTC m=+89.296761571" watchObservedRunningTime="2025-12-05 11:09:15.154838084 +0000 UTC m=+89.296960777" Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.167174 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podStartSLOduration=69.167150025 podStartE2EDuration="1m9.167150025s" podCreationTimestamp="2025-12-05 11:08:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:09:15.166845426 +0000 UTC m=+89.308968139" watchObservedRunningTime="2025-12-05 11:09:15.167150025 +0000 UTC m=+89.309272728" Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.199730 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-8pwbb" podStartSLOduration=69.199674696 podStartE2EDuration="1m9.199674696s" podCreationTimestamp="2025-12-05 11:08:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:09:15.185720645 +0000 UTC m=+89.327843338" watchObservedRunningTime="2025-12-05 11:09:15.199674696 +0000 UTC m=+89.341797389" Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.199987 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-zpkw4" podStartSLOduration=70.199980595 podStartE2EDuration="1m10.199980595s" podCreationTimestamp="2025-12-05 11:08:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:09:15.198953474 +0000 UTC m=+89.341076177" watchObservedRunningTime="2025-12-05 11:09:15.199980595 +0000 UTC m=+89.342103288" Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 
11:09:15.224880 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=35.224860605 podStartE2EDuration="35.224860605s" podCreationTimestamp="2025-12-05 11:08:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:09:15.224529255 +0000 UTC m=+89.366651968" watchObservedRunningTime="2025-12-05 11:09:15.224860605 +0000 UTC m=+89.366983298" Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.225256 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.225284 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.225293 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.225307 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.225317 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:15Z","lastTransitionTime":"2025-12-05T11:09:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.236067 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=11.236038612 podStartE2EDuration="11.236038612s" podCreationTimestamp="2025-12-05 11:09:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:09:15.23563995 +0000 UTC m=+89.377762643" watchObservedRunningTime="2025-12-05 11:09:15.236038612 +0000 UTC m=+89.378161305" Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.248016 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-85f5z" podStartSLOduration=70.247986543 podStartE2EDuration="1m10.247986543s" podCreationTimestamp="2025-12-05 11:08:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:09:15.247551219 +0000 UTC m=+89.389673922" watchObservedRunningTime="2025-12-05 11:09:15.247986543 +0000 UTC m=+89.390109236" Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.316231 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=68.31621426 podStartE2EDuration="1m8.31621426s" podCreationTimestamp="2025-12-05 11:08:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:09:15.314998103 +0000 UTC m=+89.457120856" watchObservedRunningTime="2025-12-05 11:09:15.31621426 +0000 UTC m=+89.458336953" Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.327942 4728 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.327987 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.327998 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.328016 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.328028 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:15Z","lastTransitionTime":"2025-12-05T11:09:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.350976 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:09:15 crc kubenswrapper[4728]: E1205 11:09:15.351124 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.430524 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.430576 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.430587 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.430605 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.430616 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:15Z","lastTransitionTime":"2025-12-05T11:09:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.533091 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.533143 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.533160 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.533183 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.533201 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:15Z","lastTransitionTime":"2025-12-05T11:09:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.635414 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.635470 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.635488 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.635511 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.635527 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:15Z","lastTransitionTime":"2025-12-05T11:09:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.738477 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.738519 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.738532 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.738549 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.738561 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:15Z","lastTransitionTime":"2025-12-05T11:09:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.841051 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.841094 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.841107 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.841124 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.841138 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:15Z","lastTransitionTime":"2025-12-05T11:09:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.943381 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.943435 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.943444 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.943457 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:15 crc kubenswrapper[4728]: I1205 11:09:15.943467 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:15Z","lastTransitionTime":"2025-12-05T11:09:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:16 crc kubenswrapper[4728]: I1205 11:09:16.045212 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:16 crc kubenswrapper[4728]: I1205 11:09:16.045472 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:16 crc kubenswrapper[4728]: I1205 11:09:16.045540 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:16 crc kubenswrapper[4728]: I1205 11:09:16.045601 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:16 crc kubenswrapper[4728]: I1205 11:09:16.045662 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:16Z","lastTransitionTime":"2025-12-05T11:09:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:09:16 crc kubenswrapper[4728]: I1205 11:09:16.148105 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:16 crc kubenswrapper[4728]: I1205 11:09:16.148364 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:16 crc kubenswrapper[4728]: I1205 11:09:16.148427 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:16 crc kubenswrapper[4728]: I1205 11:09:16.148507 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:16 crc kubenswrapper[4728]: I1205 11:09:16.148605 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:16Z","lastTransitionTime":"2025-12-05T11:09:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:16 crc kubenswrapper[4728]: I1205 11:09:16.251360 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:16 crc kubenswrapper[4728]: I1205 11:09:16.251420 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:16 crc kubenswrapper[4728]: I1205 11:09:16.251432 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:16 crc kubenswrapper[4728]: I1205 11:09:16.251452 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:16 crc kubenswrapper[4728]: I1205 11:09:16.251464 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:16Z","lastTransitionTime":"2025-12-05T11:09:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:16 crc kubenswrapper[4728]: I1205 11:09:16.351099 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:09:16 crc kubenswrapper[4728]: E1205 11:09:16.353739 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 11:09:16 crc kubenswrapper[4728]: I1205 11:09:16.353856 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:09:16 crc kubenswrapper[4728]: I1205 11:09:16.354025 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-2dq9w" Dec 05 11:09:16 crc kubenswrapper[4728]: E1205 11:09:16.354033 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 11:09:16 crc kubenswrapper[4728]: E1205 11:09:16.354256 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2dq9w" podUID="99a5c711-5c13-4615-93fc-9fbf02ce54ca" Dec 05 11:09:16 crc kubenswrapper[4728]: I1205 11:09:16.354691 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:16 crc kubenswrapper[4728]: I1205 11:09:16.354969 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:16 crc kubenswrapper[4728]: I1205 11:09:16.355155 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:16 crc kubenswrapper[4728]: I1205 11:09:16.355676 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:16 crc kubenswrapper[4728]: I1205 11:09:16.355948 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:16Z","lastTransitionTime":"2025-12-05T11:09:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:16 crc kubenswrapper[4728]: I1205 11:09:16.459383 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:16 crc kubenswrapper[4728]: I1205 11:09:16.459450 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:16 crc kubenswrapper[4728]: I1205 11:09:16.459471 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:16 crc kubenswrapper[4728]: I1205 11:09:16.459498 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:16 crc kubenswrapper[4728]: I1205 11:09:16.459519 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:16Z","lastTransitionTime":"2025-12-05T11:09:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:09:16 crc kubenswrapper[4728]: I1205 11:09:16.562421 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:16 crc kubenswrapper[4728]: I1205 11:09:16.562773 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:16 crc kubenswrapper[4728]: I1205 11:09:16.562899 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:16 crc kubenswrapper[4728]: I1205 11:09:16.562977 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:16 crc kubenswrapper[4728]: I1205 11:09:16.563064 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:16Z","lastTransitionTime":"2025-12-05T11:09:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:16 crc kubenswrapper[4728]: I1205 11:09:16.665610 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:16 crc kubenswrapper[4728]: I1205 11:09:16.665669 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:16 crc kubenswrapper[4728]: I1205 11:09:16.665682 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:16 crc kubenswrapper[4728]: I1205 11:09:16.665700 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:16 crc kubenswrapper[4728]: I1205 11:09:16.665712 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:16Z","lastTransitionTime":"2025-12-05T11:09:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:16 crc kubenswrapper[4728]: I1205 11:09:16.768025 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:16 crc kubenswrapper[4728]: I1205 11:09:16.768295 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:16 crc kubenswrapper[4728]: I1205 11:09:16.768365 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:16 crc kubenswrapper[4728]: I1205 11:09:16.768428 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:16 crc kubenswrapper[4728]: I1205 11:09:16.768504 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:16Z","lastTransitionTime":"2025-12-05T11:09:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:09:16 crc kubenswrapper[4728]: I1205 11:09:16.870841 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:16 crc kubenswrapper[4728]: I1205 11:09:16.870908 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:16 crc kubenswrapper[4728]: I1205 11:09:16.870920 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:16 crc kubenswrapper[4728]: I1205 11:09:16.870938 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:16 crc kubenswrapper[4728]: I1205 11:09:16.870949 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:16Z","lastTransitionTime":"2025-12-05T11:09:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:16 crc kubenswrapper[4728]: I1205 11:09:16.973973 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:16 crc kubenswrapper[4728]: I1205 11:09:16.974025 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:16 crc kubenswrapper[4728]: I1205 11:09:16.974035 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:16 crc kubenswrapper[4728]: I1205 11:09:16.974049 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:16 crc kubenswrapper[4728]: I1205 11:09:16.974058 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:16Z","lastTransitionTime":"2025-12-05T11:09:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:17 crc kubenswrapper[4728]: I1205 11:09:17.076424 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:17 crc kubenswrapper[4728]: I1205 11:09:17.076496 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:17 crc kubenswrapper[4728]: I1205 11:09:17.076517 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:17 crc kubenswrapper[4728]: I1205 11:09:17.076546 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:17 crc kubenswrapper[4728]: I1205 11:09:17.076573 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:17Z","lastTransitionTime":"2025-12-05T11:09:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:09:17 crc kubenswrapper[4728]: I1205 11:09:17.179781 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:17 crc kubenswrapper[4728]: I1205 11:09:17.179873 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:17 crc kubenswrapper[4728]: I1205 11:09:17.179894 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:17 crc kubenswrapper[4728]: I1205 11:09:17.179924 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:17 crc kubenswrapper[4728]: I1205 11:09:17.179945 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:17Z","lastTransitionTime":"2025-12-05T11:09:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:17 crc kubenswrapper[4728]: I1205 11:09:17.283042 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:17 crc kubenswrapper[4728]: I1205 11:09:17.283420 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:17 crc kubenswrapper[4728]: I1205 11:09:17.283656 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:17 crc kubenswrapper[4728]: I1205 11:09:17.283909 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:17 crc kubenswrapper[4728]: I1205 11:09:17.284130 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:17Z","lastTransitionTime":"2025-12-05T11:09:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:17 crc kubenswrapper[4728]: I1205 11:09:17.351606 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:09:17 crc kubenswrapper[4728]: E1205 11:09:17.351735 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 11:09:17 crc kubenswrapper[4728]: I1205 11:09:17.386510 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:17 crc kubenswrapper[4728]: I1205 11:09:17.386554 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:17 crc kubenswrapper[4728]: I1205 11:09:17.386564 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:17 crc kubenswrapper[4728]: I1205 11:09:17.386579 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:17 crc kubenswrapper[4728]: I1205 11:09:17.386590 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:17Z","lastTransitionTime":"2025-12-05T11:09:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:17 crc kubenswrapper[4728]: I1205 11:09:17.489589 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:17 crc kubenswrapper[4728]: I1205 11:09:17.490009 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:17 crc kubenswrapper[4728]: I1205 11:09:17.490102 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:17 crc kubenswrapper[4728]: I1205 11:09:17.490186 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:17 crc kubenswrapper[4728]: I1205 11:09:17.490316 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:17Z","lastTransitionTime":"2025-12-05T11:09:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:09:17 crc kubenswrapper[4728]: I1205 11:09:17.592638 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:17 crc kubenswrapper[4728]: I1205 11:09:17.592682 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:17 crc kubenswrapper[4728]: I1205 11:09:17.592694 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:17 crc kubenswrapper[4728]: I1205 11:09:17.592709 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:17 crc kubenswrapper[4728]: I1205 11:09:17.592722 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:17Z","lastTransitionTime":"2025-12-05T11:09:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:17 crc kubenswrapper[4728]: I1205 11:09:17.695309 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:17 crc kubenswrapper[4728]: I1205 11:09:17.695351 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:17 crc kubenswrapper[4728]: I1205 11:09:17.695363 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:17 crc kubenswrapper[4728]: I1205 11:09:17.695380 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:17 crc kubenswrapper[4728]: I1205 11:09:17.695394 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:17Z","lastTransitionTime":"2025-12-05T11:09:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:17 crc kubenswrapper[4728]: I1205 11:09:17.798666 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:17 crc kubenswrapper[4728]: I1205 11:09:17.798731 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:17 crc kubenswrapper[4728]: I1205 11:09:17.798749 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:17 crc kubenswrapper[4728]: I1205 11:09:17.798775 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:17 crc kubenswrapper[4728]: I1205 11:09:17.798833 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:17Z","lastTransitionTime":"2025-12-05T11:09:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:09:17 crc kubenswrapper[4728]: I1205 11:09:17.902514 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:17 crc kubenswrapper[4728]: I1205 11:09:17.902789 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:17 crc kubenswrapper[4728]: I1205 11:09:17.902863 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:17 crc kubenswrapper[4728]: I1205 11:09:17.902900 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:17 crc kubenswrapper[4728]: I1205 11:09:17.902924 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:17Z","lastTransitionTime":"2025-12-05T11:09:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:18 crc kubenswrapper[4728]: I1205 11:09:18.007088 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:18 crc kubenswrapper[4728]: I1205 11:09:18.007159 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:18 crc kubenswrapper[4728]: I1205 11:09:18.007175 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:18 crc kubenswrapper[4728]: I1205 11:09:18.007196 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:18 crc kubenswrapper[4728]: I1205 11:09:18.007216 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:18Z","lastTransitionTime":"2025-12-05T11:09:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:18 crc kubenswrapper[4728]: I1205 11:09:18.109990 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:18 crc kubenswrapper[4728]: I1205 11:09:18.110067 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:18 crc kubenswrapper[4728]: I1205 11:09:18.110091 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:18 crc kubenswrapper[4728]: I1205 11:09:18.110121 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:18 crc kubenswrapper[4728]: I1205 11:09:18.110145 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:18Z","lastTransitionTime":"2025-12-05T11:09:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:09:18 crc kubenswrapper[4728]: I1205 11:09:18.212580 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:18 crc kubenswrapper[4728]: I1205 11:09:18.212637 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:18 crc kubenswrapper[4728]: I1205 11:09:18.212655 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:18 crc kubenswrapper[4728]: I1205 11:09:18.212680 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:18 crc kubenswrapper[4728]: I1205 11:09:18.212697 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:18Z","lastTransitionTime":"2025-12-05T11:09:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:18 crc kubenswrapper[4728]: I1205 11:09:18.316744 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:18 crc kubenswrapper[4728]: I1205 11:09:18.316861 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:18 crc kubenswrapper[4728]: I1205 11:09:18.316880 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:18 crc kubenswrapper[4728]: I1205 11:09:18.317320 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:18 crc kubenswrapper[4728]: I1205 11:09:18.317360 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:18Z","lastTransitionTime":"2025-12-05T11:09:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:18 crc kubenswrapper[4728]: I1205 11:09:18.351757 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:09:18 crc kubenswrapper[4728]: E1205 11:09:18.351978 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 11:09:18 crc kubenswrapper[4728]: I1205 11:09:18.352097 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-2dq9w" Dec 05 11:09:18 crc kubenswrapper[4728]: E1205 11:09:18.352250 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2dq9w" podUID="99a5c711-5c13-4615-93fc-9fbf02ce54ca" Dec 05 11:09:18 crc kubenswrapper[4728]: I1205 11:09:18.352293 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:09:18 crc kubenswrapper[4728]: E1205 11:09:18.352398 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 11:09:18 crc kubenswrapper[4728]: I1205 11:09:18.420927 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:18 crc kubenswrapper[4728]: I1205 11:09:18.421037 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:18 crc kubenswrapper[4728]: I1205 11:09:18.421067 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:18 crc kubenswrapper[4728]: I1205 11:09:18.421101 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:18 crc kubenswrapper[4728]: I1205 11:09:18.421126 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:18Z","lastTransitionTime":"2025-12-05T11:09:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:09:18 crc kubenswrapper[4728]: I1205 11:09:18.523888 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:18 crc kubenswrapper[4728]: I1205 11:09:18.523963 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:18 crc kubenswrapper[4728]: I1205 11:09:18.523987 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:18 crc kubenswrapper[4728]: I1205 11:09:18.524015 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:18 crc kubenswrapper[4728]: I1205 11:09:18.524048 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:18Z","lastTransitionTime":"2025-12-05T11:09:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:18 crc kubenswrapper[4728]: I1205 11:09:18.626915 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:18 crc kubenswrapper[4728]: I1205 11:09:18.626952 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:18 crc kubenswrapper[4728]: I1205 11:09:18.626962 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:18 crc kubenswrapper[4728]: I1205 11:09:18.626978 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:18 crc kubenswrapper[4728]: I1205 11:09:18.626988 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:18Z","lastTransitionTime":"2025-12-05T11:09:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:18 crc kubenswrapper[4728]: I1205 11:09:18.729821 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:18 crc kubenswrapper[4728]: I1205 11:09:18.729868 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:18 crc kubenswrapper[4728]: I1205 11:09:18.729879 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:18 crc kubenswrapper[4728]: I1205 11:09:18.729893 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:18 crc kubenswrapper[4728]: I1205 11:09:18.729903 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:18Z","lastTransitionTime":"2025-12-05T11:09:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:09:18 crc kubenswrapper[4728]: I1205 11:09:18.832611 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:18 crc kubenswrapper[4728]: I1205 11:09:18.832687 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:18 crc kubenswrapper[4728]: I1205 11:09:18.832696 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:18 crc kubenswrapper[4728]: I1205 11:09:18.832711 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:18 crc kubenswrapper[4728]: I1205 11:09:18.832722 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:18Z","lastTransitionTime":"2025-12-05T11:09:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:18 crc kubenswrapper[4728]: I1205 11:09:18.935708 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:18 crc kubenswrapper[4728]: I1205 11:09:18.935750 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:18 crc kubenswrapper[4728]: I1205 11:09:18.935758 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:18 crc kubenswrapper[4728]: I1205 11:09:18.935772 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:18 crc kubenswrapper[4728]: I1205 11:09:18.935781 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:18Z","lastTransitionTime":"2025-12-05T11:09:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:19 crc kubenswrapper[4728]: I1205 11:09:19.038166 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:19 crc kubenswrapper[4728]: I1205 11:09:19.038213 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:19 crc kubenswrapper[4728]: I1205 11:09:19.038249 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:19 crc kubenswrapper[4728]: I1205 11:09:19.038270 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:19 crc kubenswrapper[4728]: I1205 11:09:19.038281 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:19Z","lastTransitionTime":"2025-12-05T11:09:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:09:19 crc kubenswrapper[4728]: I1205 11:09:19.140432 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:19 crc kubenswrapper[4728]: I1205 11:09:19.140491 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:19 crc kubenswrapper[4728]: I1205 11:09:19.140510 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:19 crc kubenswrapper[4728]: I1205 11:09:19.140534 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:19 crc kubenswrapper[4728]: I1205 11:09:19.140553 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:19Z","lastTransitionTime":"2025-12-05T11:09:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:19 crc kubenswrapper[4728]: I1205 11:09:19.242919 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:19 crc kubenswrapper[4728]: I1205 11:09:19.242991 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:19 crc kubenswrapper[4728]: I1205 11:09:19.243012 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:19 crc kubenswrapper[4728]: I1205 11:09:19.243036 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:19 crc kubenswrapper[4728]: I1205 11:09:19.243054 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:19Z","lastTransitionTime":"2025-12-05T11:09:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:19 crc kubenswrapper[4728]: I1205 11:09:19.345746 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:19 crc kubenswrapper[4728]: I1205 11:09:19.345883 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:19 crc kubenswrapper[4728]: I1205 11:09:19.345902 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:19 crc kubenswrapper[4728]: I1205 11:09:19.345926 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:19 crc kubenswrapper[4728]: I1205 11:09:19.345943 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:19Z","lastTransitionTime":"2025-12-05T11:09:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:09:19 crc kubenswrapper[4728]: I1205 11:09:19.351134 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:09:19 crc kubenswrapper[4728]: E1205 11:09:19.351384 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 11:09:19 crc kubenswrapper[4728]: I1205 11:09:19.448433 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:19 crc kubenswrapper[4728]: I1205 11:09:19.448534 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:19 crc kubenswrapper[4728]: I1205 11:09:19.448555 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:19 crc kubenswrapper[4728]: I1205 11:09:19.448578 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:19 crc kubenswrapper[4728]: I1205 11:09:19.448597 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:19Z","lastTransitionTime":"2025-12-05T11:09:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:19 crc kubenswrapper[4728]: I1205 11:09:19.551145 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:19 crc kubenswrapper[4728]: I1205 11:09:19.551191 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:19 crc kubenswrapper[4728]: I1205 11:09:19.551200 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:19 crc kubenswrapper[4728]: I1205 11:09:19.551214 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:19 crc kubenswrapper[4728]: I1205 11:09:19.551222 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:19Z","lastTransitionTime":"2025-12-05T11:09:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:09:19 crc kubenswrapper[4728]: I1205 11:09:19.653768 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:19 crc kubenswrapper[4728]: I1205 11:09:19.653858 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:19 crc kubenswrapper[4728]: I1205 11:09:19.653882 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:19 crc kubenswrapper[4728]: I1205 11:09:19.653911 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:19 crc kubenswrapper[4728]: I1205 11:09:19.653932 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:19Z","lastTransitionTime":"2025-12-05T11:09:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:19 crc kubenswrapper[4728]: I1205 11:09:19.756785 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:19 crc kubenswrapper[4728]: I1205 11:09:19.756871 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:19 crc kubenswrapper[4728]: I1205 11:09:19.756884 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:19 crc kubenswrapper[4728]: I1205 11:09:19.756905 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:19 crc kubenswrapper[4728]: I1205 11:09:19.756920 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:19Z","lastTransitionTime":"2025-12-05T11:09:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:19 crc kubenswrapper[4728]: I1205 11:09:19.859212 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:19 crc kubenswrapper[4728]: I1205 11:09:19.859251 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:19 crc kubenswrapper[4728]: I1205 11:09:19.859263 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:19 crc kubenswrapper[4728]: I1205 11:09:19.859280 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:19 crc kubenswrapper[4728]: I1205 11:09:19.859292 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:19Z","lastTransitionTime":"2025-12-05T11:09:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:09:19 crc kubenswrapper[4728]: I1205 11:09:19.961893 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:19 crc kubenswrapper[4728]: I1205 11:09:19.961958 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:19 crc kubenswrapper[4728]: I1205 11:09:19.961975 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:19 crc kubenswrapper[4728]: I1205 11:09:19.962011 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:19 crc kubenswrapper[4728]: I1205 11:09:19.962069 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:19Z","lastTransitionTime":"2025-12-05T11:09:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.064119 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.064163 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.064175 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.064192 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.064203 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:20Z","lastTransitionTime":"2025-12-05T11:09:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.166678 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.166732 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.166746 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.166763 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.166775 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:20Z","lastTransitionTime":"2025-12-05T11:09:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.269139 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.269184 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.269195 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.269213 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.269224 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:20Z","lastTransitionTime":"2025-12-05T11:09:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.351972 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.351982 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:09:20 crc kubenswrapper[4728]: E1205 11:09:20.352192 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.351990 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2dq9w" Dec 05 11:09:20 crc kubenswrapper[4728]: E1205 11:09:20.352253 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 11:09:20 crc kubenswrapper[4728]: E1205 11:09:20.352323 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-2dq9w" podUID="99a5c711-5c13-4615-93fc-9fbf02ce54ca" Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.370375 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.370409 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.370420 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.370431 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.370442 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:20Z","lastTransitionTime":"2025-12-05T11:09:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.472509 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.472549 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.472574 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.472589 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.472600 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:20Z","lastTransitionTime":"2025-12-05T11:09:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.575161 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.575275 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.575290 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.575339 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.575357 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:20Z","lastTransitionTime":"2025-12-05T11:09:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.678494 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.678553 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.678563 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.678581 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.678592 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:20Z","lastTransitionTime":"2025-12-05T11:09:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.788259 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.788310 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.788325 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.788357 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.788371 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:20Z","lastTransitionTime":"2025-12-05T11:09:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.794545 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.794587 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.794599 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.794615 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.794627 4728 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T11:09:20Z","lastTransitionTime":"2025-12-05T11:09:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.837282 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-n9tg5"] Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.837907 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-n9tg5" Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.841111 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.841176 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.841244 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.842981 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.857543 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/1a7aa5de-1f6c-4222-af17-c897cee16f44-service-ca\") pod \"cluster-version-operator-5c965bbfc6-n9tg5\" (UID: \"1a7aa5de-1f6c-4222-af17-c897cee16f44\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-n9tg5" Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.857598 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/1a7aa5de-1f6c-4222-af17-c897cee16f44-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-n9tg5\" (UID: \"1a7aa5de-1f6c-4222-af17-c897cee16f44\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-n9tg5" Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.857626 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: 
\"kubernetes.io/host-path/1a7aa5de-1f6c-4222-af17-c897cee16f44-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-n9tg5\" (UID: \"1a7aa5de-1f6c-4222-af17-c897cee16f44\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-n9tg5" Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.857639 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1a7aa5de-1f6c-4222-af17-c897cee16f44-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-n9tg5\" (UID: \"1a7aa5de-1f6c-4222-af17-c897cee16f44\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-n9tg5" Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.857653 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1a7aa5de-1f6c-4222-af17-c897cee16f44-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-n9tg5\" (UID: \"1a7aa5de-1f6c-4222-af17-c897cee16f44\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-n9tg5" Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.958290 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/1a7aa5de-1f6c-4222-af17-c897cee16f44-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-n9tg5\" (UID: \"1a7aa5de-1f6c-4222-af17-c897cee16f44\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-n9tg5" Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.958339 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1a7aa5de-1f6c-4222-af17-c897cee16f44-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-n9tg5\" (UID: \"1a7aa5de-1f6c-4222-af17-c897cee16f44\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-n9tg5" Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.958355 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1a7aa5de-1f6c-4222-af17-c897cee16f44-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-n9tg5\" (UID: \"1a7aa5de-1f6c-4222-af17-c897cee16f44\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-n9tg5" Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.958395 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/1a7aa5de-1f6c-4222-af17-c897cee16f44-service-ca\") pod \"cluster-version-operator-5c965bbfc6-n9tg5\" (UID: \"1a7aa5de-1f6c-4222-af17-c897cee16f44\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-n9tg5" Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.958429 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/1a7aa5de-1f6c-4222-af17-c897cee16f44-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-n9tg5\" (UID: \"1a7aa5de-1f6c-4222-af17-c897cee16f44\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-n9tg5" Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.958491 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: 
\"kubernetes.io/host-path/1a7aa5de-1f6c-4222-af17-c897cee16f44-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-n9tg5\" (UID: \"1a7aa5de-1f6c-4222-af17-c897cee16f44\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-n9tg5" Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.958481 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/1a7aa5de-1f6c-4222-af17-c897cee16f44-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-n9tg5\" (UID: \"1a7aa5de-1f6c-4222-af17-c897cee16f44\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-n9tg5" Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.959490 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/1a7aa5de-1f6c-4222-af17-c897cee16f44-service-ca\") pod \"cluster-version-operator-5c965bbfc6-n9tg5\" (UID: \"1a7aa5de-1f6c-4222-af17-c897cee16f44\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-n9tg5" Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.967869 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1a7aa5de-1f6c-4222-af17-c897cee16f44-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-n9tg5\" (UID: \"1a7aa5de-1f6c-4222-af17-c897cee16f44\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-n9tg5" Dec 05 11:09:20 crc kubenswrapper[4728]: I1205 11:09:20.981120 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1a7aa5de-1f6c-4222-af17-c897cee16f44-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-n9tg5\" (UID: \"1a7aa5de-1f6c-4222-af17-c897cee16f44\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-n9tg5" Dec 05 11:09:21 crc kubenswrapper[4728]: I1205 11:09:21.156351 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-n9tg5" Dec 05 11:09:21 crc kubenswrapper[4728]: W1205 11:09:21.176142 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1a7aa5de_1f6c_4222_af17_c897cee16f44.slice/crio-276c3072e434fec15c2287aa0a48c221c6846ebce00527f852a8d04634691846 WatchSource:0}: Error finding container 276c3072e434fec15c2287aa0a48c221c6846ebce00527f852a8d04634691846: Status 404 returned error can't find the container with id 276c3072e434fec15c2287aa0a48c221c6846ebce00527f852a8d04634691846 Dec 05 11:09:21 crc kubenswrapper[4728]: I1205 11:09:21.351467 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:09:21 crc kubenswrapper[4728]: E1205 11:09:21.351619 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 11:09:22 crc kubenswrapper[4728]: I1205 11:09:22.096011 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-n9tg5" event={"ID":"1a7aa5de-1f6c-4222-af17-c897cee16f44","Type":"ContainerStarted","Data":"37834445d9e5536ec092921d7a52e683bc7810a9ea552fd89b04b34a369a7a6b"} Dec 05 11:09:22 crc kubenswrapper[4728]: I1205 11:09:22.096072 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-n9tg5" event={"ID":"1a7aa5de-1f6c-4222-af17-c897cee16f44","Type":"ContainerStarted","Data":"276c3072e434fec15c2287aa0a48c221c6846ebce00527f852a8d04634691846"} Dec 05 11:09:22 crc kubenswrapper[4728]: I1205 11:09:22.117113 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-n9tg5" podStartSLOduration=77.117093027 podStartE2EDuration="1m17.117093027s" podCreationTimestamp="2025-12-05 11:08:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:09:22.116089317 +0000 UTC m=+96.258212020" watchObservedRunningTime="2025-12-05 11:09:22.117093027 +0000 UTC m=+96.259215720" Dec 05 11:09:22 crc kubenswrapper[4728]: I1205 11:09:22.352201 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:09:22 crc kubenswrapper[4728]: I1205 11:09:22.352200 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2dq9w" Dec 05 11:09:22 crc kubenswrapper[4728]: E1205 11:09:22.352339 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 11:09:22 crc kubenswrapper[4728]: I1205 11:09:22.352412 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:09:22 crc kubenswrapper[4728]: E1205 11:09:22.352528 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2dq9w" podUID="99a5c711-5c13-4615-93fc-9fbf02ce54ca" Dec 05 11:09:22 crc kubenswrapper[4728]: E1205 11:09:22.352607 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 11:09:23 crc kubenswrapper[4728]: I1205 11:09:23.351929 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:09:23 crc kubenswrapper[4728]: E1205 11:09:23.352140 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 11:09:23 crc kubenswrapper[4728]: I1205 11:09:23.988384 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/99a5c711-5c13-4615-93fc-9fbf02ce54ca-metrics-certs\") pod \"network-metrics-daemon-2dq9w\" (UID: \"99a5c711-5c13-4615-93fc-9fbf02ce54ca\") " pod="openshift-multus/network-metrics-daemon-2dq9w" Dec 05 11:09:23 crc kubenswrapper[4728]: E1205 11:09:23.988610 4728 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 11:09:23 crc kubenswrapper[4728]: E1205 11:09:23.988689 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/99a5c711-5c13-4615-93fc-9fbf02ce54ca-metrics-certs podName:99a5c711-5c13-4615-93fc-9fbf02ce54ca nodeName:}" failed. No retries permitted until 2025-12-05 11:10:27.988667834 +0000 UTC m=+162.130790547 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/99a5c711-5c13-4615-93fc-9fbf02ce54ca-metrics-certs") pod "network-metrics-daemon-2dq9w" (UID: "99a5c711-5c13-4615-93fc-9fbf02ce54ca") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 11:09:24 crc kubenswrapper[4728]: I1205 11:09:24.351490 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:09:24 crc kubenswrapper[4728]: I1205 11:09:24.351554 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2dq9w" Dec 05 11:09:24 crc kubenswrapper[4728]: I1205 11:09:24.351569 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:09:24 crc kubenswrapper[4728]: E1205 11:09:24.351735 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 11:09:24 crc kubenswrapper[4728]: E1205 11:09:24.351898 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-2dq9w" podUID="99a5c711-5c13-4615-93fc-9fbf02ce54ca" Dec 05 11:09:24 crc kubenswrapper[4728]: E1205 11:09:24.352012 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 11:09:25 crc kubenswrapper[4728]: I1205 11:09:25.351664 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:09:25 crc kubenswrapper[4728]: E1205 11:09:25.351905 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 11:09:26 crc kubenswrapper[4728]: I1205 11:09:26.351499 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:09:26 crc kubenswrapper[4728]: I1205 11:09:26.351509 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:09:26 crc kubenswrapper[4728]: E1205 11:09:26.352512 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 11:09:26 crc kubenswrapper[4728]: I1205 11:09:26.352590 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2dq9w" Dec 05 11:09:26 crc kubenswrapper[4728]: E1205 11:09:26.352697 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 11:09:26 crc kubenswrapper[4728]: E1205 11:09:26.352841 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2dq9w" podUID="99a5c711-5c13-4615-93fc-9fbf02ce54ca" Dec 05 11:09:27 crc kubenswrapper[4728]: I1205 11:09:27.351602 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:09:27 crc kubenswrapper[4728]: E1205 11:09:27.351980 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 11:09:27 crc kubenswrapper[4728]: I1205 11:09:27.352194 4728 scope.go:117] "RemoveContainer" containerID="3661d4a688940abf6aa1e25bffdada007e91fa298063659adea831f4432ba21f" Dec 05 11:09:27 crc kubenswrapper[4728]: E1205 11:09:27.352345 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-wchlf_openshift-ovn-kubernetes(1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5)\"" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" podUID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" Dec 05 11:09:28 crc kubenswrapper[4728]: I1205 11:09:28.351453 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:09:28 crc kubenswrapper[4728]: I1205 11:09:28.351483 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2dq9w" Dec 05 11:09:28 crc kubenswrapper[4728]: E1205 11:09:28.351668 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 11:09:28 crc kubenswrapper[4728]: I1205 11:09:28.351490 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:09:28 crc kubenswrapper[4728]: E1205 11:09:28.351739 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2dq9w" podUID="99a5c711-5c13-4615-93fc-9fbf02ce54ca" Dec 05 11:09:28 crc kubenswrapper[4728]: E1205 11:09:28.351853 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 11:09:29 crc kubenswrapper[4728]: I1205 11:09:29.351217 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:09:29 crc kubenswrapper[4728]: E1205 11:09:29.351364 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 11:09:30 crc kubenswrapper[4728]: I1205 11:09:30.351348 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2dq9w" Dec 05 11:09:30 crc kubenswrapper[4728]: I1205 11:09:30.351424 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:09:30 crc kubenswrapper[4728]: I1205 11:09:30.351427 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:09:30 crc kubenswrapper[4728]: E1205 11:09:30.351580 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2dq9w" podUID="99a5c711-5c13-4615-93fc-9fbf02ce54ca" Dec 05 11:09:30 crc kubenswrapper[4728]: E1205 11:09:30.351674 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 11:09:30 crc kubenswrapper[4728]: E1205 11:09:30.351753 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 11:09:31 crc kubenswrapper[4728]: I1205 11:09:31.350910 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:09:31 crc kubenswrapper[4728]: E1205 11:09:31.351045 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 11:09:32 crc kubenswrapper[4728]: I1205 11:09:32.351449 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:09:32 crc kubenswrapper[4728]: I1205 11:09:32.351531 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:09:32 crc kubenswrapper[4728]: I1205 11:09:32.351447 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2dq9w" Dec 05 11:09:32 crc kubenswrapper[4728]: E1205 11:09:32.351663 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 11:09:32 crc kubenswrapper[4728]: E1205 11:09:32.351780 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2dq9w" podUID="99a5c711-5c13-4615-93fc-9fbf02ce54ca" Dec 05 11:09:32 crc kubenswrapper[4728]: E1205 11:09:32.352219 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 11:09:33 crc kubenswrapper[4728]: I1205 11:09:33.351037 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:09:33 crc kubenswrapper[4728]: E1205 11:09:33.351225 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 11:09:34 crc kubenswrapper[4728]: I1205 11:09:34.351883 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:09:34 crc kubenswrapper[4728]: E1205 11:09:34.352035 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 11:09:34 crc kubenswrapper[4728]: I1205 11:09:34.352062 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-2dq9w" Dec 05 11:09:34 crc kubenswrapper[4728]: E1205 11:09:34.352148 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2dq9w" podUID="99a5c711-5c13-4615-93fc-9fbf02ce54ca" Dec 05 11:09:34 crc kubenswrapper[4728]: I1205 11:09:34.352595 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:09:34 crc kubenswrapper[4728]: E1205 11:09:34.352771 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 11:09:35 crc kubenswrapper[4728]: I1205 11:09:35.351947 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:09:35 crc kubenswrapper[4728]: E1205 11:09:35.352709 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 11:09:36 crc kubenswrapper[4728]: I1205 11:09:36.351965 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:09:36 crc kubenswrapper[4728]: E1205 11:09:36.352171 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 11:09:36 crc kubenswrapper[4728]: I1205 11:09:36.352324 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2dq9w" Dec 05 11:09:36 crc kubenswrapper[4728]: I1205 11:09:36.352328 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:09:36 crc kubenswrapper[4728]: E1205 11:09:36.352542 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-2dq9w" podUID="99a5c711-5c13-4615-93fc-9fbf02ce54ca" Dec 05 11:09:36 crc kubenswrapper[4728]: E1205 11:09:36.354248 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 11:09:37 crc kubenswrapper[4728]: I1205 11:09:37.352019 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:09:37 crc kubenswrapper[4728]: E1205 11:09:37.352289 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 11:09:38 crc kubenswrapper[4728]: I1205 11:09:38.352043 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:09:38 crc kubenswrapper[4728]: I1205 11:09:38.352102 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:09:38 crc kubenswrapper[4728]: E1205 11:09:38.352294 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 11:09:38 crc kubenswrapper[4728]: I1205 11:09:38.352338 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2dq9w" Dec 05 11:09:38 crc kubenswrapper[4728]: E1205 11:09:38.352468 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 11:09:38 crc kubenswrapper[4728]: E1205 11:09:38.352571 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-2dq9w" podUID="99a5c711-5c13-4615-93fc-9fbf02ce54ca" Dec 05 11:09:39 crc kubenswrapper[4728]: I1205 11:09:39.153248 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gf8np_f292da29-a632-47aa-8bcc-2d999eaa6c11/kube-multus/1.log" Dec 05 11:09:39 crc kubenswrapper[4728]: I1205 11:09:39.154096 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gf8np_f292da29-a632-47aa-8bcc-2d999eaa6c11/kube-multus/0.log" Dec 05 11:09:39 crc kubenswrapper[4728]: I1205 11:09:39.154143 4728 generic.go:334] "Generic (PLEG): container finished" podID="f292da29-a632-47aa-8bcc-2d999eaa6c11" containerID="441e36947749174097676ff62fe93aff6b9689d29fd12af2c1604adcceffa5d4" exitCode=1 Dec 05 11:09:39 crc kubenswrapper[4728]: I1205 11:09:39.154194 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gf8np" event={"ID":"f292da29-a632-47aa-8bcc-2d999eaa6c11","Type":"ContainerDied","Data":"441e36947749174097676ff62fe93aff6b9689d29fd12af2c1604adcceffa5d4"} Dec 05 11:09:39 crc kubenswrapper[4728]: I1205 11:09:39.154281 4728 scope.go:117] "RemoveContainer" containerID="6d525f104a98170107ecc1ae96c4f5c5ff3dda2f976336c219e5a9498725380e" Dec 05 11:09:39 crc kubenswrapper[4728]: I1205 11:09:39.155343 4728 scope.go:117] "RemoveContainer" containerID="441e36947749174097676ff62fe93aff6b9689d29fd12af2c1604adcceffa5d4" Dec 05 11:09:39 crc kubenswrapper[4728]: E1205 11:09:39.155479 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-gf8np_openshift-multus(f292da29-a632-47aa-8bcc-2d999eaa6c11)\"" pod="openshift-multus/multus-gf8np" podUID="f292da29-a632-47aa-8bcc-2d999eaa6c11" Dec 05 11:09:39 crc kubenswrapper[4728]: I1205 11:09:39.351745 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:09:39 crc kubenswrapper[4728]: E1205 11:09:39.351927 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 11:09:40 crc kubenswrapper[4728]: I1205 11:09:40.161066 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gf8np_f292da29-a632-47aa-8bcc-2d999eaa6c11/kube-multus/1.log" Dec 05 11:09:40 crc kubenswrapper[4728]: I1205 11:09:40.351926 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2dq9w" Dec 05 11:09:40 crc kubenswrapper[4728]: I1205 11:09:40.352040 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:09:40 crc kubenswrapper[4728]: E1205 11:09:40.352186 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-2dq9w" podUID="99a5c711-5c13-4615-93fc-9fbf02ce54ca" Dec 05 11:09:40 crc kubenswrapper[4728]: I1205 11:09:40.352247 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:09:40 crc kubenswrapper[4728]: E1205 11:09:40.353072 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 11:09:40 crc kubenswrapper[4728]: E1205 11:09:40.353398 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 11:09:41 crc kubenswrapper[4728]: I1205 11:09:41.351483 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:09:41 crc kubenswrapper[4728]: E1205 11:09:41.351666 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 11:09:42 crc kubenswrapper[4728]: I1205 11:09:42.351131 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:09:42 crc kubenswrapper[4728]: E1205 11:09:42.351266 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 11:09:42 crc kubenswrapper[4728]: I1205 11:09:42.351291 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2dq9w" Dec 05 11:09:42 crc kubenswrapper[4728]: I1205 11:09:42.351862 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:09:42 crc kubenswrapper[4728]: I1205 11:09:42.351904 4728 scope.go:117] "RemoveContainer" containerID="3661d4a688940abf6aa1e25bffdada007e91fa298063659adea831f4432ba21f" Dec 05 11:09:42 crc kubenswrapper[4728]: E1205 11:09:42.351989 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 11:09:42 crc kubenswrapper[4728]: E1205 11:09:42.352058 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-wchlf_openshift-ovn-kubernetes(1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5)\"" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" podUID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" Dec 05 11:09:42 crc kubenswrapper[4728]: E1205 11:09:42.352265 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2dq9w" podUID="99a5c711-5c13-4615-93fc-9fbf02ce54ca" Dec 05 11:09:43 crc kubenswrapper[4728]: I1205 11:09:43.351255 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:09:43 crc kubenswrapper[4728]: E1205 11:09:43.351458 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 11:09:44 crc kubenswrapper[4728]: I1205 11:09:44.351349 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:09:44 crc kubenswrapper[4728]: E1205 11:09:44.351505 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 11:09:44 crc kubenswrapper[4728]: I1205 11:09:44.351609 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:09:44 crc kubenswrapper[4728]: I1205 11:09:44.351650 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2dq9w" Dec 05 11:09:44 crc kubenswrapper[4728]: E1205 11:09:44.351728 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 11:09:44 crc kubenswrapper[4728]: E1205 11:09:44.351883 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2dq9w" podUID="99a5c711-5c13-4615-93fc-9fbf02ce54ca" Dec 05 11:09:45 crc kubenswrapper[4728]: I1205 11:09:45.351520 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:09:45 crc kubenswrapper[4728]: E1205 11:09:45.352085 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 11:09:46 crc kubenswrapper[4728]: E1205 11:09:46.128596 4728 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Dec 05 11:09:46 crc kubenswrapper[4728]: I1205 11:09:46.351668 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:09:46 crc kubenswrapper[4728]: I1205 11:09:46.351668 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:09:46 crc kubenswrapper[4728]: E1205 11:09:46.353759 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 11:09:46 crc kubenswrapper[4728]: I1205 11:09:46.353854 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2dq9w" Dec 05 11:09:46 crc kubenswrapper[4728]: E1205 11:09:46.353996 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 11:09:46 crc kubenswrapper[4728]: E1205 11:09:46.354133 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-2dq9w" podUID="99a5c711-5c13-4615-93fc-9fbf02ce54ca" Dec 05 11:09:46 crc kubenswrapper[4728]: E1205 11:09:46.493431 4728 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 05 11:09:47 crc kubenswrapper[4728]: I1205 11:09:47.351723 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:09:47 crc kubenswrapper[4728]: E1205 11:09:47.351980 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 11:09:48 crc kubenswrapper[4728]: I1205 11:09:48.351462 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:09:48 crc kubenswrapper[4728]: I1205 11:09:48.351562 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:09:48 crc kubenswrapper[4728]: E1205 11:09:48.351619 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 11:09:48 crc kubenswrapper[4728]: I1205 11:09:48.351747 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2dq9w" Dec 05 11:09:48 crc kubenswrapper[4728]: E1205 11:09:48.351925 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 11:09:48 crc kubenswrapper[4728]: E1205 11:09:48.352065 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2dq9w" podUID="99a5c711-5c13-4615-93fc-9fbf02ce54ca" Dec 05 11:09:49 crc kubenswrapper[4728]: I1205 11:09:49.351078 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:09:49 crc kubenswrapper[4728]: E1205 11:09:49.351268 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 11:09:50 crc kubenswrapper[4728]: I1205 11:09:50.351259 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:09:50 crc kubenswrapper[4728]: I1205 11:09:50.351367 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2dq9w" Dec 05 11:09:50 crc kubenswrapper[4728]: E1205 11:09:50.351449 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 11:09:50 crc kubenswrapper[4728]: I1205 11:09:50.351267 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:09:50 crc kubenswrapper[4728]: E1205 11:09:50.351602 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2dq9w" podUID="99a5c711-5c13-4615-93fc-9fbf02ce54ca" Dec 05 11:09:50 crc kubenswrapper[4728]: E1205 11:09:50.351749 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 11:09:51 crc kubenswrapper[4728]: I1205 11:09:51.351295 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:09:51 crc kubenswrapper[4728]: E1205 11:09:51.351523 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 11:09:51 crc kubenswrapper[4728]: I1205 11:09:51.351662 4728 scope.go:117] "RemoveContainer" containerID="441e36947749174097676ff62fe93aff6b9689d29fd12af2c1604adcceffa5d4" Dec 05 11:09:51 crc kubenswrapper[4728]: E1205 11:09:51.495314 4728 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 05 11:09:52 crc kubenswrapper[4728]: I1205 11:09:52.201032 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gf8np_f292da29-a632-47aa-8bcc-2d999eaa6c11/kube-multus/1.log" Dec 05 11:09:52 crc kubenswrapper[4728]: I1205 11:09:52.201094 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gf8np" event={"ID":"f292da29-a632-47aa-8bcc-2d999eaa6c11","Type":"ContainerStarted","Data":"e6c8a50a0f4dc19dfaf32a236e434e6304cb2a3a6b07fc252a74f5603ecfd7bf"} Dec 05 11:09:52 crc kubenswrapper[4728]: I1205 11:09:52.351392 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:09:52 crc kubenswrapper[4728]: E1205 11:09:52.351548 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 11:09:52 crc kubenswrapper[4728]: I1205 11:09:52.351653 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2dq9w" Dec 05 11:09:52 crc kubenswrapper[4728]: I1205 11:09:52.351668 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:09:52 crc kubenswrapper[4728]: E1205 11:09:52.351827 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2dq9w" podUID="99a5c711-5c13-4615-93fc-9fbf02ce54ca" Dec 05 11:09:52 crc kubenswrapper[4728]: E1205 11:09:52.351958 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 11:09:53 crc kubenswrapper[4728]: I1205 11:09:53.351014 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:09:53 crc kubenswrapper[4728]: E1205 11:09:53.351255 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 11:09:54 crc kubenswrapper[4728]: I1205 11:09:54.354015 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:09:54 crc kubenswrapper[4728]: I1205 11:09:54.354780 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2dq9w" Dec 05 11:09:54 crc kubenswrapper[4728]: E1205 11:09:54.354954 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 11:09:54 crc kubenswrapper[4728]: I1205 11:09:54.355008 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:09:54 crc kubenswrapper[4728]: E1205 11:09:54.355100 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2dq9w" podUID="99a5c711-5c13-4615-93fc-9fbf02ce54ca" Dec 05 11:09:54 crc kubenswrapper[4728]: E1205 11:09:54.355279 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 11:09:55 crc kubenswrapper[4728]: I1205 11:09:55.350959 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:09:55 crc kubenswrapper[4728]: E1205 11:09:55.351144 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 11:09:56 crc kubenswrapper[4728]: I1205 11:09:56.351875 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:09:56 crc kubenswrapper[4728]: E1205 11:09:56.353226 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 11:09:56 crc kubenswrapper[4728]: I1205 11:09:56.353249 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:09:56 crc kubenswrapper[4728]: I1205 11:09:56.353267 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2dq9w" Dec 05 11:09:56 crc kubenswrapper[4728]: E1205 11:09:56.353661 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 11:09:56 crc kubenswrapper[4728]: E1205 11:09:56.353725 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2dq9w" podUID="99a5c711-5c13-4615-93fc-9fbf02ce54ca" Dec 05 11:09:56 crc kubenswrapper[4728]: I1205 11:09:56.353946 4728 scope.go:117] "RemoveContainer" containerID="3661d4a688940abf6aa1e25bffdada007e91fa298063659adea831f4432ba21f" Dec 05 11:09:56 crc kubenswrapper[4728]: E1205 11:09:56.496423 4728 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 05 11:09:57 crc kubenswrapper[4728]: I1205 11:09:57.219926 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-wchlf_1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5/ovnkube-controller/3.log" Dec 05 11:09:57 crc kubenswrapper[4728]: I1205 11:09:57.225403 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" event={"ID":"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5","Type":"ContainerStarted","Data":"729b66dbb2cc4e235ea8209988e47495481e1acd5dc9b695c226c4c572a0b841"} Dec 05 11:09:57 crc kubenswrapper[4728]: I1205 11:09:57.225971 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:09:57 crc kubenswrapper[4728]: I1205 11:09:57.233771 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-2dq9w"] Dec 05 11:09:57 crc kubenswrapper[4728]: I1205 11:09:57.233933 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-2dq9w" Dec 05 11:09:57 crc kubenswrapper[4728]: E1205 11:09:57.234143 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2dq9w" podUID="99a5c711-5c13-4615-93fc-9fbf02ce54ca" Dec 05 11:09:57 crc kubenswrapper[4728]: I1205 11:09:57.351187 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:09:57 crc kubenswrapper[4728]: E1205 11:09:57.351613 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 11:09:58 crc kubenswrapper[4728]: I1205 11:09:58.351908 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:09:58 crc kubenswrapper[4728]: I1205 11:09:58.351920 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:09:58 crc kubenswrapper[4728]: E1205 11:09:58.352074 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 11:09:58 crc kubenswrapper[4728]: E1205 11:09:58.352183 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 11:09:59 crc kubenswrapper[4728]: I1205 11:09:59.351714 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:09:59 crc kubenswrapper[4728]: I1205 11:09:59.351812 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2dq9w" Dec 05 11:09:59 crc kubenswrapper[4728]: E1205 11:09:59.351976 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 11:09:59 crc kubenswrapper[4728]: E1205 11:09:59.352353 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2dq9w" podUID="99a5c711-5c13-4615-93fc-9fbf02ce54ca" Dec 05 11:10:00 crc kubenswrapper[4728]: I1205 11:10:00.351489 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:10:00 crc kubenswrapper[4728]: I1205 11:10:00.351497 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:10:00 crc kubenswrapper[4728]: E1205 11:10:00.351744 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 11:10:00 crc kubenswrapper[4728]: E1205 11:10:00.351911 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.351684 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2dq9w" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.351730 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:10:01 crc kubenswrapper[4728]: E1205 11:10:01.351954 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2dq9w" podUID="99a5c711-5c13-4615-93fc-9fbf02ce54ca" Dec 05 11:10:01 crc kubenswrapper[4728]: E1205 11:10:01.352083 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.760253 4728 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.832682 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" podStartSLOduration=115.832656883 podStartE2EDuration="1m55.832656883s" podCreationTimestamp="2025-12-05 11:08:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:09:57.260531468 +0000 UTC m=+131.402654181" watchObservedRunningTime="2025-12-05 11:10:01.832656883 +0000 UTC m=+135.974779596" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.833527 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-vllpv"] Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.834656 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-vllpv" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.840586 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.840719 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-qnmf2"] Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.842637 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.842666 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.842680 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.843078 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.847617 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qnmf2" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.847651 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.848283 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-52pf7"] Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.851399 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-52pf7" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.851714 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.851744 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.851995 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.852142 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.852408 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.852594 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.852971 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.853441 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-8d796"] Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.853850 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-x9m7l"] Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.854084 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.854415 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8d796" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.856124 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-lgb9x"] Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.856657 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/downloads-7954f5f757-lgb9x" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.858280 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.858735 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.858785 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.862466 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.862739 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.862923 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.862947 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.862983 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.863033 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.863179 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.863214 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.863310 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.863350 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.863184 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.863430 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.863477 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.863507 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.863570 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.863589 4728 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-oauth-apiserver"/"audit-1" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.863594 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.863876 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.863963 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.864043 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.864126 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.864199 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.864277 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.864363 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.864871 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.864981 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.865334 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tzm4v"] Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.871054 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-qr78k"] Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.871324 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vrvkz"] Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.871713 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vrvkz" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.872032 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tzm4v" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.872248 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-qr78k" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.877059 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-8wd9p"] Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.877726 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8b7w5"] Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.878119 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8b7w5" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.878708 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8wd9p" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.881684 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-f6bjc"] Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.882293 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-f6bjc" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.882551 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jpc9p"] Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.883186 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jpc9p" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.885088 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-x8nh8"] Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.886152 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.886196 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.887335 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.888161 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.888906 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.889230 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.889705 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.889967 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.890118 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.890301 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.890461 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.890478 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.890666 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.890883 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.891128 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.891369 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.891502 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.891612 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.891836 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.892062 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 
05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.892329 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.892414 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.892551 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.893083 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.893739 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.894400 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.894508 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.897996 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nvfz5"] Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.898448 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.904221 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.914859 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.914955 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nvfz5" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.914868 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-pdjw2"] Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.915777 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.915920 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.915947 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-76cfh"] Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.915988 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.916172 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.916437 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-76cfh" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.916530 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.916678 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.916681 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.916728 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vqk4x\" (UniqueName: \"kubernetes.io/projected/f35e95aa-4acc-4b87-a673-1e22826ebe22-kube-api-access-vqk4x\") pod \"cluster-image-registry-operator-dc59b4c8b-jpc9p\" (UID: \"f35e95aa-4acc-4b87-a673-1e22826ebe22\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jpc9p" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.916771 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6c786bc2-e4d2-4402-a944-d21132d6087b-serving-cert\") pod \"apiserver-7bbb656c7d-8d796\" (UID: \"6c786bc2-e4d2-4402-a944-d21132d6087b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8d796" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.916810 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vr92b\" (UniqueName: \"kubernetes.io/projected/1c8c9c9a-5889-46cd-a366-122310015aa3-kube-api-access-vr92b\") pod \"openshift-controller-manager-operator-756b6f6bc6-8b7w5\" (UID: \"1c8c9c9a-5889-46cd-a366-122310015aa3\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8b7w5" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.916823 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 05 11:10:01 crc kubenswrapper[4728]: 
I1205 11:10:01.916829 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9ead179c-fe6a-47fd-a4b8-6af96faff785-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-tzm4v\" (UID: \"9ead179c-fe6a-47fd-a4b8-6af96faff785\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tzm4v" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.916857 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5e51975c-42dd-458c-a52c-2f8cf11810cf-serving-cert\") pod \"authentication-operator-69f744f599-qr78k\" (UID: \"5e51975c-42dd-458c-a52c-2f8cf11810cf\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-qr78k" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.916871 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/132479f7-af34-420b-821f-34c11e07b06e-node-pullsecrets\") pod \"apiserver-76f77b778f-pdjw2\" (UID: \"132479f7-af34-420b-821f-34c11e07b06e\") " pod="openshift-apiserver/apiserver-76f77b778f-pdjw2" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.916886 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/4c3990f9-726a-43a5-a84b-eea529806652-available-featuregates\") pod \"openshift-config-operator-7777fb866f-8wd9p\" (UID: \"4c3990f9-726a-43a5-a84b-eea529806652\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8wd9p" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.916900 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2dw9n\" (UniqueName: \"kubernetes.io/projected/991dbe07-304e-4bd9-9aca-2b29134cc869-kube-api-access-2dw9n\") pod \"route-controller-manager-6576b87f9c-qnmf2\" (UID: \"991dbe07-304e-4bd9-9aca-2b29134cc869\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qnmf2" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.916919 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/132479f7-af34-420b-821f-34c11e07b06e-audit\") pod \"apiserver-76f77b778f-pdjw2\" (UID: \"132479f7-af34-420b-821f-34c11e07b06e\") " pod="openshift-apiserver/apiserver-76f77b778f-pdjw2" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.916949 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-x9m7l\" (UID: \"d92fe6c3-10f5-4151-86cb-236a4c79463b\") " pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.916984 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qjwvx\" (UniqueName: \"kubernetes.io/projected/b21b7f08-1b74-4ed3-8a78-f6a03b514069-kube-api-access-qjwvx\") pod \"downloads-7954f5f757-lgb9x\" (UID: \"b21b7f08-1b74-4ed3-8a78-f6a03b514069\") " pod="openshift-console/downloads-7954f5f757-lgb9x" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 
11:10:01.917013 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2s6ck\" (UniqueName: \"kubernetes.io/projected/9ead179c-fe6a-47fd-a4b8-6af96faff785-kube-api-access-2s6ck\") pod \"openshift-apiserver-operator-796bbdcf4f-tzm4v\" (UID: \"9ead179c-fe6a-47fd-a4b8-6af96faff785\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tzm4v" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.917050 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bwkfn\" (UniqueName: \"kubernetes.io/projected/e4952f54-bca0-4cae-b32a-f9d8cb0bb91c-kube-api-access-bwkfn\") pod \"cluster-samples-operator-665b6dd947-vrvkz\" (UID: \"e4952f54-bca0-4cae-b32a-f9d8cb0bb91c\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vrvkz" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.917077 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/132479f7-af34-420b-821f-34c11e07b06e-etcd-client\") pod \"apiserver-76f77b778f-pdjw2\" (UID: \"132479f7-af34-420b-821f-34c11e07b06e\") " pod="openshift-apiserver/apiserver-76f77b778f-pdjw2" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.917104 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e51975c-42dd-458c-a52c-2f8cf11810cf-config\") pod \"authentication-operator-69f744f599-qr78k\" (UID: \"5e51975c-42dd-458c-a52c-2f8cf11810cf\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-qr78k" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.916833 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-pdjw2" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.918663 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qnlhl\" (UniqueName: \"kubernetes.io/projected/132479f7-af34-420b-821f-34c11e07b06e-kube-api-access-qnlhl\") pod \"apiserver-76f77b778f-pdjw2\" (UID: \"132479f7-af34-420b-821f-34c11e07b06e\") " pod="openshift-apiserver/apiserver-76f77b778f-pdjw2" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.918726 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-x9m7l\" (UID: \"d92fe6c3-10f5-4151-86cb-236a4c79463b\") " pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.918746 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-x9m7l\" (UID: \"d92fe6c3-10f5-4151-86cb-236a4c79463b\") " pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.918806 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/6c786bc2-e4d2-4402-a944-d21132d6087b-etcd-client\") pod \"apiserver-7bbb656c7d-8d796\" (UID: \"6c786bc2-e4d2-4402-a944-d21132d6087b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8d796" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.918834 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1c8c9c9a-5889-46cd-a366-122310015aa3-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-8b7w5\" (UID: \"1c8c9c9a-5889-46cd-a366-122310015aa3\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8b7w5" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.918955 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/132479f7-af34-420b-821f-34c11e07b06e-etcd-serving-ca\") pod \"apiserver-76f77b778f-pdjw2\" (UID: \"132479f7-af34-420b-821f-34c11e07b06e\") " pod="openshift-apiserver/apiserver-76f77b778f-pdjw2" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.919085 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/132479f7-af34-420b-821f-34c11e07b06e-audit-dir\") pod \"apiserver-76f77b778f-pdjw2\" (UID: \"132479f7-af34-420b-821f-34c11e07b06e\") " pod="openshift-apiserver/apiserver-76f77b778f-pdjw2" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.919158 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-x9m7l\" (UID: 
\"d92fe6c3-10f5-4151-86cb-236a4c79463b\") " pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.919229 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6c786bc2-e4d2-4402-a944-d21132d6087b-audit-dir\") pod \"apiserver-7bbb656c7d-8d796\" (UID: \"6c786bc2-e4d2-4402-a944-d21132d6087b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8d796" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.919265 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/132479f7-af34-420b-821f-34c11e07b06e-config\") pod \"apiserver-76f77b778f-pdjw2\" (UID: \"132479f7-af34-420b-821f-34c11e07b06e\") " pod="openshift-apiserver/apiserver-76f77b778f-pdjw2" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.919289 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/fda4dac4-0200-4740-a9c1-c3897809c2c0-console-serving-cert\") pod \"console-f9d7485db-f6bjc\" (UID: \"fda4dac4-0200-4740-a9c1-c3897809c2c0\") " pod="openshift-console/console-f9d7485db-f6bjc" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.919324 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5e51975c-42dd-458c-a52c-2f8cf11810cf-service-ca-bundle\") pod \"authentication-operator-69f744f599-qr78k\" (UID: \"5e51975c-42dd-458c-a52c-2f8cf11810cf\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-qr78k" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.919352 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/f35e95aa-4acc-4b87-a673-1e22826ebe22-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-jpc9p\" (UID: \"f35e95aa-4acc-4b87-a673-1e22826ebe22\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jpc9p" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.919432 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/132479f7-af34-420b-821f-34c11e07b06e-serving-cert\") pod \"apiserver-76f77b778f-pdjw2\" (UID: \"132479f7-af34-420b-821f-34c11e07b06e\") " pod="openshift-apiserver/apiserver-76f77b778f-pdjw2" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.919491 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/049f4ab0-4d3e-45ab-b390-e4c80a919880-serving-cert\") pod \"controller-manager-879f6c89f-52pf7\" (UID: \"049f4ab0-4d3e-45ab-b390-e4c80a919880\") " pod="openshift-controller-manager/controller-manager-879f6c89f-52pf7" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.919539 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-x9m7l\" (UID: \"d92fe6c3-10f5-4151-86cb-236a4c79463b\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.919563 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/fda4dac4-0200-4740-a9c1-c3897809c2c0-console-oauth-config\") pod \"console-f9d7485db-f6bjc\" (UID: \"fda4dac4-0200-4740-a9c1-c3897809c2c0\") " pod="openshift-console/console-f9d7485db-f6bjc" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.919585 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fda4dac4-0200-4740-a9c1-c3897809c2c0-trusted-ca-bundle\") pod \"console-f9d7485db-f6bjc\" (UID: \"fda4dac4-0200-4740-a9c1-c3897809c2c0\") " pod="openshift-console/console-f9d7485db-f6bjc" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.919605 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1c8c9c9a-5889-46cd-a366-122310015aa3-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-8b7w5\" (UID: \"1c8c9c9a-5889-46cd-a366-122310015aa3\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8b7w5" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.919621 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/d92fe6c3-10f5-4151-86cb-236a4c79463b-audit-policies\") pod \"oauth-openshift-558db77b4-x9m7l\" (UID: \"d92fe6c3-10f5-4151-86cb-236a4c79463b\") " pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.919648 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4c3990f9-726a-43a5-a84b-eea529806652-serving-cert\") pod \"openshift-config-operator-7777fb866f-8wd9p\" (UID: \"4c3990f9-726a-43a5-a84b-eea529806652\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8wd9p" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.919716 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/132479f7-af34-420b-821f-34c11e07b06e-image-import-ca\") pod \"apiserver-76f77b778f-pdjw2\" (UID: \"132479f7-af34-420b-821f-34c11e07b06e\") " pod="openshift-apiserver/apiserver-76f77b778f-pdjw2" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.919737 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/132479f7-af34-420b-821f-34c11e07b06e-trusted-ca-bundle\") pod \"apiserver-76f77b778f-pdjw2\" (UID: \"132479f7-af34-420b-821f-34c11e07b06e\") " pod="openshift-apiserver/apiserver-76f77b778f-pdjw2" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.919757 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tlnhg\" (UniqueName: \"kubernetes.io/projected/d92fe6c3-10f5-4151-86cb-236a4c79463b-kube-api-access-tlnhg\") pod \"oauth-openshift-558db77b4-x9m7l\" (UID: \"d92fe6c3-10f5-4151-86cb-236a4c79463b\") " pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" Dec 05 11:10:01 crc 
kubenswrapper[4728]: I1205 11:10:01.919779 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/049f4ab0-4d3e-45ab-b390-e4c80a919880-client-ca\") pod \"controller-manager-879f6c89f-52pf7\" (UID: \"049f4ab0-4d3e-45ab-b390-e4c80a919880\") " pod="openshift-controller-manager/controller-manager-879f6c89f-52pf7" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.919876 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/132479f7-af34-420b-821f-34c11e07b06e-encryption-config\") pod \"apiserver-76f77b778f-pdjw2\" (UID: \"132479f7-af34-420b-821f-34c11e07b06e\") " pod="openshift-apiserver/apiserver-76f77b778f-pdjw2" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.919924 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-x9m7l\" (UID: \"d92fe6c3-10f5-4151-86cb-236a4c79463b\") " pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.919993 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-x9m7l\" (UID: \"d92fe6c3-10f5-4151-86cb-236a4c79463b\") " pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.920038 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d56hs\" (UniqueName: \"kubernetes.io/projected/049f4ab0-4d3e-45ab-b390-e4c80a919880-kube-api-access-d56hs\") pod \"controller-manager-879f6c89f-52pf7\" (UID: \"049f4ab0-4d3e-45ab-b390-e4c80a919880\") " pod="openshift-controller-manager/controller-manager-879f6c89f-52pf7" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.920092 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/6c786bc2-e4d2-4402-a944-d21132d6087b-encryption-config\") pod \"apiserver-7bbb656c7d-8d796\" (UID: \"6c786bc2-e4d2-4402-a944-d21132d6087b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8d796" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.920133 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9ead179c-fe6a-47fd-a4b8-6af96faff785-config\") pod \"openshift-apiserver-operator-796bbdcf4f-tzm4v\" (UID: \"9ead179c-fe6a-47fd-a4b8-6af96faff785\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tzm4v" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.920164 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/fda4dac4-0200-4740-a9c1-c3897809c2c0-console-config\") pod \"console-f9d7485db-f6bjc\" (UID: \"fda4dac4-0200-4740-a9c1-c3897809c2c0\") " pod="openshift-console/console-f9d7485db-f6bjc" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.920191 4728 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6c786bc2-e4d2-4402-a944-d21132d6087b-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-8d796\" (UID: \"6c786bc2-e4d2-4402-a944-d21132d6087b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8d796" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.920221 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-x9m7l\" (UID: \"d92fe6c3-10f5-4151-86cb-236a4c79463b\") " pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.920251 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/991dbe07-304e-4bd9-9aca-2b29134cc869-config\") pod \"route-controller-manager-6576b87f9c-qnmf2\" (UID: \"991dbe07-304e-4bd9-9aca-2b29134cc869\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qnmf2" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.920276 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/991dbe07-304e-4bd9-9aca-2b29134cc869-serving-cert\") pod \"route-controller-manager-6576b87f9c-qnmf2\" (UID: \"991dbe07-304e-4bd9-9aca-2b29134cc869\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qnmf2" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.920346 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/049f4ab0-4d3e-45ab-b390-e4c80a919880-config\") pod \"controller-manager-879f6c89f-52pf7\" (UID: \"049f4ab0-4d3e-45ab-b390-e4c80a919880\") " pod="openshift-controller-manager/controller-manager-879f6c89f-52pf7" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.920380 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/6c786bc2-e4d2-4402-a944-d21132d6087b-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-8d796\" (UID: \"6c786bc2-e4d2-4402-a944-d21132d6087b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8d796" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.920412 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-x9m7l\" (UID: \"d92fe6c3-10f5-4151-86cb-236a4c79463b\") " pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.920439 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s778f\" (UniqueName: \"kubernetes.io/projected/fda4dac4-0200-4740-a9c1-c3897809c2c0-kube-api-access-s778f\") pod \"console-f9d7485db-f6bjc\" (UID: \"fda4dac4-0200-4740-a9c1-c3897809c2c0\") " pod="openshift-console/console-f9d7485db-f6bjc" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.920464 4728 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/991dbe07-304e-4bd9-9aca-2b29134cc869-client-ca\") pod \"route-controller-manager-6576b87f9c-qnmf2\" (UID: \"991dbe07-304e-4bd9-9aca-2b29134cc869\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qnmf2" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.920498 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7nk7v\" (UniqueName: \"kubernetes.io/projected/4c3990f9-726a-43a5-a84b-eea529806652-kube-api-access-7nk7v\") pod \"openshift-config-operator-7777fb866f-8wd9p\" (UID: \"4c3990f9-726a-43a5-a84b-eea529806652\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8wd9p" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.920524 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d65vg\" (UniqueName: \"kubernetes.io/projected/5e51975c-42dd-458c-a52c-2f8cf11810cf-kube-api-access-d65vg\") pod \"authentication-operator-69f744f599-qr78k\" (UID: \"5e51975c-42dd-458c-a52c-2f8cf11810cf\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-qr78k" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.920553 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-x9m7l\" (UID: \"d92fe6c3-10f5-4151-86cb-236a4c79463b\") " pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.920610 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/6c786bc2-e4d2-4402-a944-d21132d6087b-audit-policies\") pod \"apiserver-7bbb656c7d-8d796\" (UID: \"6c786bc2-e4d2-4402-a944-d21132d6087b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8d796" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.920650 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f35e95aa-4acc-4b87-a673-1e22826ebe22-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-jpc9p\" (UID: \"f35e95aa-4acc-4b87-a673-1e22826ebe22\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jpc9p" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.920693 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-x9m7l\" (UID: \"d92fe6c3-10f5-4151-86cb-236a4c79463b\") " pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.920721 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/fda4dac4-0200-4740-a9c1-c3897809c2c0-service-ca\") pod \"console-f9d7485db-f6bjc\" (UID: \"fda4dac4-0200-4740-a9c1-c3897809c2c0\") " pod="openshift-console/console-f9d7485db-f6bjc" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.920767 4728 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5e51975c-42dd-458c-a52c-2f8cf11810cf-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-qr78k\" (UID: \"5e51975c-42dd-458c-a52c-2f8cf11810cf\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-qr78k" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.920816 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/e4952f54-bca0-4cae-b32a-f9d8cb0bb91c-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-vrvkz\" (UID: \"e4952f54-bca0-4cae-b32a-f9d8cb0bb91c\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vrvkz" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.920859 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f35e95aa-4acc-4b87-a673-1e22826ebe22-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-jpc9p\" (UID: \"f35e95aa-4acc-4b87-a673-1e22826ebe22\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jpc9p" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.920890 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/049f4ab0-4d3e-45ab-b390-e4c80a919880-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-52pf7\" (UID: \"049f4ab0-4d3e-45ab-b390-e4c80a919880\") " pod="openshift-controller-manager/controller-manager-879f6c89f-52pf7" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.920952 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bndf4\" (UniqueName: \"kubernetes.io/projected/6c786bc2-e4d2-4402-a944-d21132d6087b-kube-api-access-bndf4\") pod \"apiserver-7bbb656c7d-8d796\" (UID: \"6c786bc2-e4d2-4402-a944-d21132d6087b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8d796" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.921006 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d92fe6c3-10f5-4151-86cb-236a4c79463b-audit-dir\") pod \"oauth-openshift-558db77b4-x9m7l\" (UID: \"d92fe6c3-10f5-4151-86cb-236a4c79463b\") " pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.921034 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/fda4dac4-0200-4740-a9c1-c3897809c2c0-oauth-serving-cert\") pod \"console-f9d7485db-f6bjc\" (UID: \"fda4dac4-0200-4740-a9c1-c3897809c2c0\") " pod="openshift-console/console-f9d7485db-f6bjc" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.928141 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.935609 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.935996 4728 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.936364 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.939143 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.939431 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.939563 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.939451 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.939675 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-c6s8n"] Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.940179 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-c6s8n" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.940505 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.940547 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.940673 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.940820 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.940954 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.941056 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.941171 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.941237 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.943065 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.944283 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.944502 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.945604 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" 
Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.945720 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-lt4gz"]
Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.946438 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-lt4gz"
Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.946682 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-hqq2x"]
Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.951961 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-zrtl6"]
Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.952106 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hqq2x"
Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.952622 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-q4v9s"]
Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.952854 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-zrtl6"
Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.952974 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-q4v9s"
Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.956257 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-f7jqh"]
Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.957560 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-f7jqh"
Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.961190 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca"
Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.963760 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-pq24q"]
Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.965470 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-c2jd4"]
Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.965839 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pq24q"
Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.966392 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-c2jd4"
Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.967855 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5x4xq"]
Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.970128 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5x4xq"
Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.971573 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-xszbp"]
Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.978499 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca"
Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.978759 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-g6zpg"]
Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.978937 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-xszbp"
Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.980193 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-p75qw"]
Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.980621 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-424hr"]
Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.980660 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-g6zpg"
Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.980750 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-p75qw"
Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.983045 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-8z7kn"]
Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.983288 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-424hr"
Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.983833 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7fhtn"]
Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.984001 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-8z7kn"
Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.985278 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-2clxj"]
Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.985414 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7fhtn"
Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.985849 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-52pf7"]
Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.985879 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-4r2zd"]
Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.985999 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-2clxj"
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-2clxj" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.986773 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vh9hw"] Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.987091 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-4r2zd" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.987288 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-qnmf2"] Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.987322 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-gr77k"] Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.987773 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-vllpv"] Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.987834 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vh9hw" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.987879 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-gr77k" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.988771 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415540-x94v5"] Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.989393 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415540-x94v5" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.991638 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7xxhg"] Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.992341 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tzm4v"] Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.992407 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7xxhg" Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.993441 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-lgb9x"] Dec 05 11:10:01 crc kubenswrapper[4728]: I1205 11:10:01.999632 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-8d796"] Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.001932 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-8wd9p"] Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.004085 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-gd7w5"] Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.006358 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-gd7w5" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.007473 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-x9m7l"] Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.007729 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.008916 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jpc9p"] Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.011637 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-x8nh8"] Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.014412 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-zrtl6"] Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.017312 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.018351 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-g6zpg"] Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.021888 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-x9m7l\" (UID: \"d92fe6c3-10f5-4151-86cb-236a4c79463b\") " pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.021928 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e51975c-42dd-458c-a52c-2f8cf11810cf-config\") pod \"authentication-operator-69f744f599-qr78k\" (UID: \"5e51975c-42dd-458c-a52c-2f8cf11810cf\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-qr78k" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.021950 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qnlhl\" (UniqueName: \"kubernetes.io/projected/132479f7-af34-420b-821f-34c11e07b06e-kube-api-access-qnlhl\") pod \"apiserver-76f77b778f-pdjw2\" (UID: \"132479f7-af34-420b-821f-34c11e07b06e\") " pod="openshift-apiserver/apiserver-76f77b778f-pdjw2" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.021970 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-x9m7l\" (UID: \"d92fe6c3-10f5-4151-86cb-236a4c79463b\") " pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.021993 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1c8c9c9a-5889-46cd-a366-122310015aa3-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-8b7w5\" (UID: \"1c8c9c9a-5889-46cd-a366-122310015aa3\") " 
pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8b7w5" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.022018 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/132479f7-af34-420b-821f-34c11e07b06e-etcd-serving-ca\") pod \"apiserver-76f77b778f-pdjw2\" (UID: \"132479f7-af34-420b-821f-34c11e07b06e\") " pod="openshift-apiserver/apiserver-76f77b778f-pdjw2" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.022038 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/6c786bc2-e4d2-4402-a944-d21132d6087b-etcd-client\") pod \"apiserver-7bbb656c7d-8d796\" (UID: \"6c786bc2-e4d2-4402-a944-d21132d6087b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8d796" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.022062 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6c786bc2-e4d2-4402-a944-d21132d6087b-audit-dir\") pod \"apiserver-7bbb656c7d-8d796\" (UID: \"6c786bc2-e4d2-4402-a944-d21132d6087b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8d796" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.022903 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/132479f7-af34-420b-821f-34c11e07b06e-config\") pod \"apiserver-76f77b778f-pdjw2\" (UID: \"132479f7-af34-420b-821f-34c11e07b06e\") " pod="openshift-apiserver/apiserver-76f77b778f-pdjw2" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.022973 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/132479f7-af34-420b-821f-34c11e07b06e-audit-dir\") pod \"apiserver-76f77b778f-pdjw2\" (UID: \"132479f7-af34-420b-821f-34c11e07b06e\") " pod="openshift-apiserver/apiserver-76f77b778f-pdjw2" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.023015 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-x9m7l\" (UID: \"d92fe6c3-10f5-4151-86cb-236a4c79463b\") " pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.023056 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/fda4dac4-0200-4740-a9c1-c3897809c2c0-console-serving-cert\") pod \"console-f9d7485db-f6bjc\" (UID: \"fda4dac4-0200-4740-a9c1-c3897809c2c0\") " pod="openshift-console/console-f9d7485db-f6bjc" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.023088 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5e51975c-42dd-458c-a52c-2f8cf11810cf-service-ca-bundle\") pod \"authentication-operator-69f744f599-qr78k\" (UID: \"5e51975c-42dd-458c-a52c-2f8cf11810cf\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-qr78k" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.023126 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/f35e95aa-4acc-4b87-a673-1e22826ebe22-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-jpc9p\" (UID: \"f35e95aa-4acc-4b87-a673-1e22826ebe22\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jpc9p" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.023177 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/132479f7-af34-420b-821f-34c11e07b06e-serving-cert\") pod \"apiserver-76f77b778f-pdjw2\" (UID: \"132479f7-af34-420b-821f-34c11e07b06e\") " pod="openshift-apiserver/apiserver-76f77b778f-pdjw2" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.023219 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/049f4ab0-4d3e-45ab-b390-e4c80a919880-serving-cert\") pod \"controller-manager-879f6c89f-52pf7\" (UID: \"049f4ab0-4d3e-45ab-b390-e4c80a919880\") " pod="openshift-controller-manager/controller-manager-879f6c89f-52pf7" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.023254 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fda4dac4-0200-4740-a9c1-c3897809c2c0-trusted-ca-bundle\") pod \"console-f9d7485db-f6bjc\" (UID: \"fda4dac4-0200-4740-a9c1-c3897809c2c0\") " pod="openshift-console/console-f9d7485db-f6bjc" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.023289 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-x9m7l\" (UID: \"d92fe6c3-10f5-4151-86cb-236a4c79463b\") " pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.023319 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/fda4dac4-0200-4740-a9c1-c3897809c2c0-console-oauth-config\") pod \"console-f9d7485db-f6bjc\" (UID: \"fda4dac4-0200-4740-a9c1-c3897809c2c0\") " pod="openshift-console/console-f9d7485db-f6bjc" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.023403 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1c8c9c9a-5889-46cd-a366-122310015aa3-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-8b7w5\" (UID: \"1c8c9c9a-5889-46cd-a366-122310015aa3\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8b7w5" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.023966 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/132479f7-af34-420b-821f-34c11e07b06e-etcd-serving-ca\") pod \"apiserver-76f77b778f-pdjw2\" (UID: \"132479f7-af34-420b-821f-34c11e07b06e\") " pod="openshift-apiserver/apiserver-76f77b778f-pdjw2" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.024465 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/132479f7-af34-420b-821f-34c11e07b06e-audit-dir\") pod \"apiserver-76f77b778f-pdjw2\" (UID: \"132479f7-af34-420b-821f-34c11e07b06e\") " pod="openshift-apiserver/apiserver-76f77b778f-pdjw2" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.026667 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e51975c-42dd-458c-a52c-2f8cf11810cf-config\") pod \"authentication-operator-69f744f599-qr78k\" (UID: \"5e51975c-42dd-458c-a52c-2f8cf11810cf\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-qr78k"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.022110 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6c786bc2-e4d2-4402-a944-d21132d6087b-audit-dir\") pod \"apiserver-7bbb656c7d-8d796\" (UID: \"6c786bc2-e4d2-4402-a944-d21132d6087b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8d796"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.027299 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4c3990f9-726a-43a5-a84b-eea529806652-serving-cert\") pod \"openshift-config-operator-7777fb866f-8wd9p\" (UID: \"4c3990f9-726a-43a5-a84b-eea529806652\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8wd9p"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.027330 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1c8c9c9a-5889-46cd-a366-122310015aa3-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-8b7w5\" (UID: \"1c8c9c9a-5889-46cd-a366-122310015aa3\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8b7w5"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.027356 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/d92fe6c3-10f5-4151-86cb-236a4c79463b-audit-policies\") pod \"oauth-openshift-558db77b4-x9m7l\" (UID: \"d92fe6c3-10f5-4151-86cb-236a4c79463b\") " pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.027430 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/132479f7-af34-420b-821f-34c11e07b06e-trusted-ca-bundle\") pod \"apiserver-76f77b778f-pdjw2\" (UID: \"132479f7-af34-420b-821f-34c11e07b06e\") " pod="openshift-apiserver/apiserver-76f77b778f-pdjw2"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.027508 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/132479f7-af34-420b-821f-34c11e07b06e-image-import-ca\") pod \"apiserver-76f77b778f-pdjw2\" (UID: \"132479f7-af34-420b-821f-34c11e07b06e\") " pod="openshift-apiserver/apiserver-76f77b778f-pdjw2"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.027531 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/132479f7-af34-420b-821f-34c11e07b06e-encryption-config\") pod \"apiserver-76f77b778f-pdjw2\" (UID: \"132479f7-af34-420b-821f-34c11e07b06e\") " pod="openshift-apiserver/apiserver-76f77b778f-pdjw2"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.027551 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-x9m7l\" (UID: \"d92fe6c3-10f5-4151-86cb-236a4c79463b\") " pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.027573 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tlnhg\" (UniqueName: \"kubernetes.io/projected/d92fe6c3-10f5-4151-86cb-236a4c79463b-kube-api-access-tlnhg\") pod \"oauth-openshift-558db77b4-x9m7l\" (UID: \"d92fe6c3-10f5-4151-86cb-236a4c79463b\") " pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.027593 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/049f4ab0-4d3e-45ab-b390-e4c80a919880-client-ca\") pod \"controller-manager-879f6c89f-52pf7\" (UID: \"049f4ab0-4d3e-45ab-b390-e4c80a919880\") " pod="openshift-controller-manager/controller-manager-879f6c89f-52pf7"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.027620 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d56hs\" (UniqueName: \"kubernetes.io/projected/049f4ab0-4d3e-45ab-b390-e4c80a919880-kube-api-access-d56hs\") pod \"controller-manager-879f6c89f-52pf7\" (UID: \"049f4ab0-4d3e-45ab-b390-e4c80a919880\") " pod="openshift-controller-manager/controller-manager-879f6c89f-52pf7"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.027655 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-x9m7l\" (UID: \"d92fe6c3-10f5-4151-86cb-236a4c79463b\") " pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.027688 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/6c786bc2-e4d2-4402-a944-d21132d6087b-encryption-config\") pod \"apiserver-7bbb656c7d-8d796\" (UID: \"6c786bc2-e4d2-4402-a944-d21132d6087b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8d796"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.027713 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9ead179c-fe6a-47fd-a4b8-6af96faff785-config\") pod \"openshift-apiserver-operator-796bbdcf4f-tzm4v\" (UID: \"9ead179c-fe6a-47fd-a4b8-6af96faff785\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tzm4v"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.027736 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/fda4dac4-0200-4740-a9c1-c3897809c2c0-console-config\") pod \"console-f9d7485db-f6bjc\" (UID: \"fda4dac4-0200-4740-a9c1-c3897809c2c0\") " pod="openshift-console/console-f9d7485db-f6bjc"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.027759 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6c786bc2-e4d2-4402-a944-d21132d6087b-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-8d796\" (UID: \"6c786bc2-e4d2-4402-a944-d21132d6087b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8d796"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.027780 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-x9m7l\" (UID: \"d92fe6c3-10f5-4151-86cb-236a4c79463b\") " pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.027852 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/991dbe07-304e-4bd9-9aca-2b29134cc869-config\") pod \"route-controller-manager-6576b87f9c-qnmf2\" (UID: \"991dbe07-304e-4bd9-9aca-2b29134cc869\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qnmf2"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.027930 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/991dbe07-304e-4bd9-9aca-2b29134cc869-serving-cert\") pod \"route-controller-manager-6576b87f9c-qnmf2\" (UID: \"991dbe07-304e-4bd9-9aca-2b29134cc869\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qnmf2"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.027958 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/049f4ab0-4d3e-45ab-b390-e4c80a919880-config\") pod \"controller-manager-879f6c89f-52pf7\" (UID: \"049f4ab0-4d3e-45ab-b390-e4c80a919880\") " pod="openshift-controller-manager/controller-manager-879f6c89f-52pf7"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.027976 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/991dbe07-304e-4bd9-9aca-2b29134cc869-client-ca\") pod \"route-controller-manager-6576b87f9c-qnmf2\" (UID: \"991dbe07-304e-4bd9-9aca-2b29134cc869\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qnmf2"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.028008 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/6c786bc2-e4d2-4402-a944-d21132d6087b-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-8d796\" (UID: \"6c786bc2-e4d2-4402-a944-d21132d6087b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8d796"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.028038 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-x9m7l\" (UID: \"d92fe6c3-10f5-4151-86cb-236a4c79463b\") " pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.028073 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s778f\" (UniqueName: \"kubernetes.io/projected/fda4dac4-0200-4740-a9c1-c3897809c2c0-kube-api-access-s778f\") pod \"console-f9d7485db-f6bjc\" (UID: \"fda4dac4-0200-4740-a9c1-c3897809c2c0\") " pod="openshift-console/console-f9d7485db-f6bjc"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.028167 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-x9m7l\" (UID: \"d92fe6c3-10f5-4151-86cb-236a4c79463b\") " pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.028302 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7nk7v\" (UniqueName: \"kubernetes.io/projected/4c3990f9-726a-43a5-a84b-eea529806652-kube-api-access-7nk7v\") pod \"openshift-config-operator-7777fb866f-8wd9p\" (UID: \"4c3990f9-726a-43a5-a84b-eea529806652\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8wd9p"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.028377 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d65vg\" (UniqueName: \"kubernetes.io/projected/5e51975c-42dd-458c-a52c-2f8cf11810cf-kube-api-access-d65vg\") pod \"authentication-operator-69f744f599-qr78k\" (UID: \"5e51975c-42dd-458c-a52c-2f8cf11810cf\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-qr78k"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.028411 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/6c786bc2-e4d2-4402-a944-d21132d6087b-audit-policies\") pod \"apiserver-7bbb656c7d-8d796\" (UID: \"6c786bc2-e4d2-4402-a944-d21132d6087b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8d796"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.028433 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f35e95aa-4acc-4b87-a673-1e22826ebe22-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-jpc9p\" (UID: \"f35e95aa-4acc-4b87-a673-1e22826ebe22\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jpc9p"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.028456 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5e51975c-42dd-458c-a52c-2f8cf11810cf-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-qr78k\" (UID: \"5e51975c-42dd-458c-a52c-2f8cf11810cf\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-qr78k"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.028480 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/e4952f54-bca0-4cae-b32a-f9d8cb0bb91c-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-vrvkz\" (UID: \"e4952f54-bca0-4cae-b32a-f9d8cb0bb91c\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vrvkz"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.028504 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-x9m7l\" (UID: \"d92fe6c3-10f5-4151-86cb-236a4c79463b\") " pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l"
\"kubernetes.io/configmap/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-x9m7l\" (UID: \"d92fe6c3-10f5-4151-86cb-236a4c79463b\") " pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.028595 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/fda4dac4-0200-4740-a9c1-c3897809c2c0-service-ca\") pod \"console-f9d7485db-f6bjc\" (UID: \"fda4dac4-0200-4740-a9c1-c3897809c2c0\") " pod="openshift-console/console-f9d7485db-f6bjc" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.028624 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f35e95aa-4acc-4b87-a673-1e22826ebe22-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-jpc9p\" (UID: \"f35e95aa-4acc-4b87-a673-1e22826ebe22\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jpc9p" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.028643 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/049f4ab0-4d3e-45ab-b390-e4c80a919880-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-52pf7\" (UID: \"049f4ab0-4d3e-45ab-b390-e4c80a919880\") " pod="openshift-controller-manager/controller-manager-879f6c89f-52pf7" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.028686 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bndf4\" (UniqueName: \"kubernetes.io/projected/6c786bc2-e4d2-4402-a944-d21132d6087b-kube-api-access-bndf4\") pod \"apiserver-7bbb656c7d-8d796\" (UID: \"6c786bc2-e4d2-4402-a944-d21132d6087b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8d796" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.028706 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d92fe6c3-10f5-4151-86cb-236a4c79463b-audit-dir\") pod \"oauth-openshift-558db77b4-x9m7l\" (UID: \"d92fe6c3-10f5-4151-86cb-236a4c79463b\") " pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.028729 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/fda4dac4-0200-4740-a9c1-c3897809c2c0-oauth-serving-cert\") pod \"console-f9d7485db-f6bjc\" (UID: \"fda4dac4-0200-4740-a9c1-c3897809c2c0\") " pod="openshift-console/console-f9d7485db-f6bjc" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.028758 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vqk4x\" (UniqueName: \"kubernetes.io/projected/f35e95aa-4acc-4b87-a673-1e22826ebe22-kube-api-access-vqk4x\") pod \"cluster-image-registry-operator-dc59b4c8b-jpc9p\" (UID: \"f35e95aa-4acc-4b87-a673-1e22826ebe22\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jpc9p" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.028785 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vr92b\" (UniqueName: \"kubernetes.io/projected/1c8c9c9a-5889-46cd-a366-122310015aa3-kube-api-access-vr92b\") pod \"openshift-controller-manager-operator-756b6f6bc6-8b7w5\" (UID: \"1c8c9c9a-5889-46cd-a366-122310015aa3\") " 
pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8b7w5" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.028842 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9ead179c-fe6a-47fd-a4b8-6af96faff785-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-tzm4v\" (UID: \"9ead179c-fe6a-47fd-a4b8-6af96faff785\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tzm4v" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.028890 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6c786bc2-e4d2-4402-a944-d21132d6087b-serving-cert\") pod \"apiserver-7bbb656c7d-8d796\" (UID: \"6c786bc2-e4d2-4402-a944-d21132d6087b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8d796" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.028931 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/4c3990f9-726a-43a5-a84b-eea529806652-available-featuregates\") pod \"openshift-config-operator-7777fb866f-8wd9p\" (UID: \"4c3990f9-726a-43a5-a84b-eea529806652\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8wd9p" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.028951 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5e51975c-42dd-458c-a52c-2f8cf11810cf-serving-cert\") pod \"authentication-operator-69f744f599-qr78k\" (UID: \"5e51975c-42dd-458c-a52c-2f8cf11810cf\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-qr78k" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.028976 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/132479f7-af34-420b-821f-34c11e07b06e-node-pullsecrets\") pod \"apiserver-76f77b778f-pdjw2\" (UID: \"132479f7-af34-420b-821f-34c11e07b06e\") " pod="openshift-apiserver/apiserver-76f77b778f-pdjw2" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.029003 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2dw9n\" (UniqueName: \"kubernetes.io/projected/991dbe07-304e-4bd9-9aca-2b29134cc869-kube-api-access-2dw9n\") pod \"route-controller-manager-6576b87f9c-qnmf2\" (UID: \"991dbe07-304e-4bd9-9aca-2b29134cc869\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qnmf2" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.029024 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-x9m7l\" (UID: \"d92fe6c3-10f5-4151-86cb-236a4c79463b\") " pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.029046 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qjwvx\" (UniqueName: \"kubernetes.io/projected/b21b7f08-1b74-4ed3-8a78-f6a03b514069-kube-api-access-qjwvx\") pod \"downloads-7954f5f757-lgb9x\" (UID: \"b21b7f08-1b74-4ed3-8a78-f6a03b514069\") " pod="openshift-console/downloads-7954f5f757-lgb9x" Dec 05 11:10:02 crc kubenswrapper[4728]: 
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.029096 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2s6ck\" (UniqueName: \"kubernetes.io/projected/9ead179c-fe6a-47fd-a4b8-6af96faff785-kube-api-access-2s6ck\") pod \"openshift-apiserver-operator-796bbdcf4f-tzm4v\" (UID: \"9ead179c-fe6a-47fd-a4b8-6af96faff785\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tzm4v"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.029145 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bwkfn\" (UniqueName: \"kubernetes.io/projected/e4952f54-bca0-4cae-b32a-f9d8cb0bb91c-kube-api-access-bwkfn\") pod \"cluster-samples-operator-665b6dd947-vrvkz\" (UID: \"e4952f54-bca0-4cae-b32a-f9d8cb0bb91c\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vrvkz"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.029177 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/132479f7-af34-420b-821f-34c11e07b06e-etcd-client\") pod \"apiserver-76f77b778f-pdjw2\" (UID: \"132479f7-af34-420b-821f-34c11e07b06e\") " pod="openshift-apiserver/apiserver-76f77b778f-pdjw2"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.031167 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/132479f7-af34-420b-821f-34c11e07b06e-trusted-ca-bundle\") pod \"apiserver-76f77b778f-pdjw2\" (UID: \"132479f7-af34-420b-821f-34c11e07b06e\") " pod="openshift-apiserver/apiserver-76f77b778f-pdjw2"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.032926 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-x9m7l\" (UID: \"d92fe6c3-10f5-4151-86cb-236a4c79463b\") " pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.034388 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/6c786bc2-e4d2-4402-a944-d21132d6087b-audit-policies\") pod \"apiserver-7bbb656c7d-8d796\" (UID: \"6c786bc2-e4d2-4402-a944-d21132d6087b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8d796"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.035904 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5e51975c-42dd-458c-a52c-2f8cf11810cf-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-qr78k\" (UID: \"5e51975c-42dd-458c-a52c-2f8cf11810cf\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-qr78k"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.037701 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/d92fe6c3-10f5-4151-86cb-236a4c79463b-audit-policies\") pod \"oauth-openshift-558db77b4-x9m7l\" (UID: \"d92fe6c3-10f5-4151-86cb-236a4c79463b\") " pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.038391 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fda4dac4-0200-4740-a9c1-c3897809c2c0-trusted-ca-bundle\") pod \"console-f9d7485db-f6bjc\" (UID: \"fda4dac4-0200-4740-a9c1-c3897809c2c0\") " pod="openshift-console/console-f9d7485db-f6bjc"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.038874 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/132479f7-af34-420b-821f-34c11e07b06e-image-import-ca\") pod \"apiserver-76f77b778f-pdjw2\" (UID: \"132479f7-af34-420b-821f-34c11e07b06e\") " pod="openshift-apiserver/apiserver-76f77b778f-pdjw2"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.039353 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/132479f7-af34-420b-821f-34c11e07b06e-audit\") pod \"apiserver-76f77b778f-pdjw2\" (UID: \"132479f7-af34-420b-821f-34c11e07b06e\") " pod="openshift-apiserver/apiserver-76f77b778f-pdjw2"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.043201 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/4c3990f9-726a-43a5-a84b-eea529806652-available-featuregates\") pod \"openshift-config-operator-7777fb866f-8wd9p\" (UID: \"4c3990f9-726a-43a5-a84b-eea529806652\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8wd9p"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.042930 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/fda4dac4-0200-4740-a9c1-c3897809c2c0-service-ca\") pod \"console-f9d7485db-f6bjc\" (UID: \"fda4dac4-0200-4740-a9c1-c3897809c2c0\") " pod="openshift-console/console-f9d7485db-f6bjc"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.043183 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/fda4dac4-0200-4740-a9c1-c3897809c2c0-console-serving-cert\") pod \"console-f9d7485db-f6bjc\" (UID: \"fda4dac4-0200-4740-a9c1-c3897809c2c0\") " pod="openshift-console/console-f9d7485db-f6bjc"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.042723 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/049f4ab0-4d3e-45ab-b390-e4c80a919880-serving-cert\") pod \"controller-manager-879f6c89f-52pf7\" (UID: \"049f4ab0-4d3e-45ab-b390-e4c80a919880\") " pod="openshift-controller-manager/controller-manager-879f6c89f-52pf7"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.043543 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/132479f7-af34-420b-821f-34c11e07b06e-etcd-client\") pod \"apiserver-76f77b778f-pdjw2\" (UID: \"132479f7-af34-420b-821f-34c11e07b06e\") " pod="openshift-apiserver/apiserver-76f77b778f-pdjw2"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.044370 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f35e95aa-4acc-4b87-a673-1e22826ebe22-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-jpc9p\" (UID: \"f35e95aa-4acc-4b87-a673-1e22826ebe22\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jpc9p"
\"cluster-image-registry-operator-dc59b4c8b-jpc9p\" (UID: \"f35e95aa-4acc-4b87-a673-1e22826ebe22\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jpc9p" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.044463 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/049f4ab0-4d3e-45ab-b390-e4c80a919880-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-52pf7\" (UID: \"049f4ab0-4d3e-45ab-b390-e4c80a919880\") " pod="openshift-controller-manager/controller-manager-879f6c89f-52pf7" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.044821 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-x9m7l\" (UID: \"d92fe6c3-10f5-4151-86cb-236a4c79463b\") " pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.045144 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/6c786bc2-e4d2-4402-a944-d21132d6087b-etcd-client\") pod \"apiserver-7bbb656c7d-8d796\" (UID: \"6c786bc2-e4d2-4402-a944-d21132d6087b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8d796" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.045168 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-x9m7l\" (UID: \"d92fe6c3-10f5-4151-86cb-236a4c79463b\") " pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.045214 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d92fe6c3-10f5-4151-86cb-236a4c79463b-audit-dir\") pod \"oauth-openshift-558db77b4-x9m7l\" (UID: \"d92fe6c3-10f5-4151-86cb-236a4c79463b\") " pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.045251 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/132479f7-af34-420b-821f-34c11e07b06e-node-pullsecrets\") pod \"apiserver-76f77b778f-pdjw2\" (UID: \"132479f7-af34-420b-821f-34c11e07b06e\") " pod="openshift-apiserver/apiserver-76f77b778f-pdjw2" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.046371 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/991dbe07-304e-4bd9-9aca-2b29134cc869-config\") pod \"route-controller-manager-6576b87f9c-qnmf2\" (UID: \"991dbe07-304e-4bd9-9aca-2b29134cc869\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qnmf2" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.047100 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9ead179c-fe6a-47fd-a4b8-6af96faff785-config\") pod \"openshift-apiserver-operator-796bbdcf4f-tzm4v\" (UID: \"9ead179c-fe6a-47fd-a4b8-6af96faff785\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tzm4v" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.047552 4728 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/fda4dac4-0200-4740-a9c1-c3897809c2c0-console-config\") pod \"console-f9d7485db-f6bjc\" (UID: \"fda4dac4-0200-4740-a9c1-c3897809c2c0\") " pod="openshift-console/console-f9d7485db-f6bjc" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.047849 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/049f4ab0-4d3e-45ab-b390-e4c80a919880-client-ca\") pod \"controller-manager-879f6c89f-52pf7\" (UID: \"049f4ab0-4d3e-45ab-b390-e4c80a919880\") " pod="openshift-controller-manager/controller-manager-879f6c89f-52pf7" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.048086 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5x4xq"] Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.048610 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-x9m7l\" (UID: \"d92fe6c3-10f5-4151-86cb-236a4c79463b\") " pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.049504 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/f35e95aa-4acc-4b87-a673-1e22826ebe22-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-jpc9p\" (UID: \"f35e95aa-4acc-4b87-a673-1e22826ebe22\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jpc9p" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.049859 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-x9m7l\" (UID: \"d92fe6c3-10f5-4151-86cb-236a4c79463b\") " pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.050116 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-x9m7l\" (UID: \"d92fe6c3-10f5-4151-86cb-236a4c79463b\") " pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.050578 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6c786bc2-e4d2-4402-a944-d21132d6087b-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-8d796\" (UID: \"6c786bc2-e4d2-4402-a944-d21132d6087b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8d796" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.050821 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6c786bc2-e4d2-4402-a944-d21132d6087b-serving-cert\") pod \"apiserver-7bbb656c7d-8d796\" (UID: \"6c786bc2-e4d2-4402-a944-d21132d6087b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8d796" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.051111 4728 reflector.go:368] Caches populated 
for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.053006 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/6c786bc2-e4d2-4402-a944-d21132d6087b-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-8d796\" (UID: \"6c786bc2-e4d2-4402-a944-d21132d6087b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8d796" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.053104 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-x9m7l\" (UID: \"d92fe6c3-10f5-4151-86cb-236a4c79463b\") " pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.053213 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/132479f7-af34-420b-821f-34c11e07b06e-serving-cert\") pod \"apiserver-76f77b778f-pdjw2\" (UID: \"132479f7-af34-420b-821f-34c11e07b06e\") " pod="openshift-apiserver/apiserver-76f77b778f-pdjw2" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.053951 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/049f4ab0-4d3e-45ab-b390-e4c80a919880-config\") pod \"controller-manager-879f6c89f-52pf7\" (UID: \"049f4ab0-4d3e-45ab-b390-e4c80a919880\") " pod="openshift-controller-manager/controller-manager-879f6c89f-52pf7" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.054600 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/991dbe07-304e-4bd9-9aca-2b29134cc869-client-ca\") pod \"route-controller-manager-6576b87f9c-qnmf2\" (UID: \"991dbe07-304e-4bd9-9aca-2b29134cc869\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qnmf2" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.054928 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9ead179c-fe6a-47fd-a4b8-6af96faff785-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-tzm4v\" (UID: \"9ead179c-fe6a-47fd-a4b8-6af96faff785\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tzm4v" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.055932 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/fda4dac4-0200-4740-a9c1-c3897809c2c0-console-oauth-config\") pod \"console-f9d7485db-f6bjc\" (UID: \"fda4dac4-0200-4740-a9c1-c3897809c2c0\") " pod="openshift-console/console-f9d7485db-f6bjc" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.055955 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/6c786bc2-e4d2-4402-a944-d21132d6087b-encryption-config\") pod \"apiserver-7bbb656c7d-8d796\" (UID: \"6c786bc2-e4d2-4402-a944-d21132d6087b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8d796" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.056258 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-f7jqh"] Dec 05 11:10:02 crc 
kubenswrapper[4728]: I1205 11:10:02.056562 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/fda4dac4-0200-4740-a9c1-c3897809c2c0-oauth-serving-cert\") pod \"console-f9d7485db-f6bjc\" (UID: \"fda4dac4-0200-4740-a9c1-c3897809c2c0\") " pod="openshift-console/console-f9d7485db-f6bjc" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.057321 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/e4952f54-bca0-4cae-b32a-f9d8cb0bb91c-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-vrvkz\" (UID: \"e4952f54-bca0-4cae-b32a-f9d8cb0bb91c\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vrvkz" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.057503 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5e51975c-42dd-458c-a52c-2f8cf11810cf-serving-cert\") pod \"authentication-operator-69f744f599-qr78k\" (UID: \"5e51975c-42dd-458c-a52c-2f8cf11810cf\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-qr78k" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.057545 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/991dbe07-304e-4bd9-9aca-2b29134cc869-serving-cert\") pod \"route-controller-manager-6576b87f9c-qnmf2\" (UID: \"991dbe07-304e-4bd9-9aca-2b29134cc869\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qnmf2" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.058516 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.059022 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-x9m7l\" (UID: \"d92fe6c3-10f5-4151-86cb-236a4c79463b\") " pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.059383 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-x9m7l\" (UID: \"d92fe6c3-10f5-4151-86cb-236a4c79463b\") " pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.060347 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-x9m7l\" (UID: \"d92fe6c3-10f5-4151-86cb-236a4c79463b\") " pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.064190 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-gr77k"] Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.064234 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-hqq2x"] Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.064909 4728 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/132479f7-af34-420b-821f-34c11e07b06e-config\") pod \"apiserver-76f77b778f-pdjw2\" (UID: \"132479f7-af34-420b-821f-34c11e07b06e\") " pod="openshift-apiserver/apiserver-76f77b778f-pdjw2" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.065257 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1c8c9c9a-5889-46cd-a366-122310015aa3-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-8b7w5\" (UID: \"1c8c9c9a-5889-46cd-a366-122310015aa3\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8b7w5" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.065261 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4c3990f9-726a-43a5-a84b-eea529806652-serving-cert\") pod \"openshift-config-operator-7777fb866f-8wd9p\" (UID: \"4c3990f9-726a-43a5-a84b-eea529806652\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8wd9p" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.065835 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/132479f7-af34-420b-821f-34c11e07b06e-encryption-config\") pod \"apiserver-76f77b778f-pdjw2\" (UID: \"132479f7-af34-420b-821f-34c11e07b06e\") " pod="openshift-apiserver/apiserver-76f77b778f-pdjw2" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.066666 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-x9m7l\" (UID: \"d92fe6c3-10f5-4151-86cb-236a4c79463b\") " pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.071075 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vrvkz"] Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.075536 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nvfz5"] Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.075573 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-qr78k"] Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.078458 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-f6bjc"] Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.080043 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8b7w5"] Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.106217 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.109128 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-pdjw2"] Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.111746 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-xszbp"] Dec 05 
11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.113719 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-424hr"] Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.117648 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-gd7w5"] Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.118248 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.119362 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-lt4gz"] Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.121741 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-76cfh"] Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.121779 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-8z7kn"] Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.125074 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-p75qw"] Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.125695 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-q4v9s"] Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.128208 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-2clxj"] Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.129868 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415540-x94v5"] Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.130646 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-jr65f"] Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.131343 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-jr65f" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.131676 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7xxhg"] Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.132743 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-c2jd4"] Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.133973 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7fhtn"] Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.134963 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vh9hw"] Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.136136 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-4r2zd"] Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.137112 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.137143 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-kwdb8"] Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.137767 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-kwdb8" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.138137 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-ppfpq"] Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.139034 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-ppfpq" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.139197 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-kwdb8"] Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.140315 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-ppfpq"] Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.157381 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.177608 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.197594 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.217263 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.257551 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.277924 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.297475 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.318462 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.331180 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ac5d7238-f656-4446-9620-49a1ea4f677f-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-nvfz5\" (UID: \"ac5d7238-f656-4446-9620-49a1ea4f677f\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nvfz5" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.331207 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac5d7238-f656-4446-9620-49a1ea4f677f-config\") pod \"kube-apiserver-operator-766d6c64bb-nvfz5\" (UID: \"ac5d7238-f656-4446-9620-49a1ea4f677f\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nvfz5" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.331251 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ac5d7238-f656-4446-9620-49a1ea4f677f-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-nvfz5\" (UID: \"ac5d7238-f656-4446-9620-49a1ea4f677f\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nvfz5" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.331453 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/aec2314f-9290-4eb4-a632-70baf826e29a-ca-trust-extracted\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") 
" pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.331519 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4d671a2-6454-4bf6-a099-0c0e15de2f20-config\") pod \"machine-api-operator-5694c8668f-vllpv\" (UID: \"f4d671a2-6454-4bf6-a099-0c0e15de2f20\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vllpv" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.331582 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.331639 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dzkgc\" (UniqueName: \"kubernetes.io/projected/f4d671a2-6454-4bf6-a099-0c0e15de2f20-kube-api-access-dzkgc\") pod \"machine-api-operator-5694c8668f-vllpv\" (UID: \"f4d671a2-6454-4bf6-a099-0c0e15de2f20\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vllpv" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.331737 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/f4d671a2-6454-4bf6-a099-0c0e15de2f20-images\") pod \"machine-api-operator-5694c8668f-vllpv\" (UID: \"f4d671a2-6454-4bf6-a099-0c0e15de2f20\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vllpv" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.331774 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bwlsw\" (UniqueName: \"kubernetes.io/projected/aec2314f-9290-4eb4-a632-70baf826e29a-kube-api-access-bwlsw\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.331843 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/aec2314f-9290-4eb4-a632-70baf826e29a-installation-pull-secrets\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.331889 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/f4d671a2-6454-4bf6-a099-0c0e15de2f20-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-vllpv\" (UID: \"f4d671a2-6454-4bf6-a099-0c0e15de2f20\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vllpv" Dec 05 11:10:02 crc kubenswrapper[4728]: E1205 11:10:02.331926 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:02.83191304 +0000 UTC m=+136.974035733 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.331946 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/aec2314f-9290-4eb4-a632-70baf826e29a-bound-sa-token\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.332060 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/aec2314f-9290-4eb4-a632-70baf826e29a-trusted-ca\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.332239 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/aec2314f-9290-4eb4-a632-70baf826e29a-registry-certificates\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.332305 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/aec2314f-9290-4eb4-a632-70baf826e29a-registry-tls\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.338496 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.350951 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.350999 4728 util.go:30] "No sandbox for pod can be found. 
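[Annotation] The E1205 nestedpendingoperations.go:348 record above shows why the image-registry pod's PVC cannot be mounted yet: the kubevirt.io.hostpath-provisioner CSI driver has not registered with the kubelet (the csi-hostpathplugin-ppfpq pod that provides it is only now being scheduled, per the SyncLoop ADD above), so MountDevice fails and the operation is gated with "No retries permitted until ... (durationBeforeRetry 500ms)". The Go sketch below is illustrative only and is not kubelet source; every identifier in it is hypothetical, and it assumes only what the log shows: an initial 500ms delay, a "not before" gate, a growing delay on repeated failures, and a reset once the operation succeeds after the driver registers.

// backoffgate.go - minimal sketch of the retry gating visible in the
// nestedpendingoperations records in this log. NOT kubelet source; all
// names are hypothetical. Assumed behaviour taken from the log: a failed
// volume operation blocks retries until now+durationBeforeRetry, the first
// delay is 500ms, and the delay grows on repeated failures.
package main

import (
	"fmt"
	"time"
)

const (
	initialDelay = 500 * time.Millisecond // matches "durationBeforeRetry 500ms"
	maxDelay     = 2 * time.Minute        // assumed cap for this sketch
)

// retryGate remembers when a failed operation may next be attempted.
type retryGate struct {
	delay     time.Duration // durationBeforeRetry to apply on the next failure
	notBefore time.Time     // "No retries permitted until ..."
}

// allowed reports whether an attempt may start at time now.
func (g *retryGate) allowed(now time.Time) bool {
	return now.After(g.notBefore)
}

// fail records a failure at now: block retries for the current delay,
// then grow the delay for the next failure, up to maxDelay.
func (g *retryGate) fail(now time.Time) {
	if g.delay == 0 {
		g.delay = initialDelay
	}
	g.notBefore = now.Add(g.delay)
	g.delay *= 2
	if g.delay > maxDelay {
		g.delay = maxDelay
	}
}

// succeed resets the gate so a future failure starts again at 500ms.
func (g *retryGate) succeed() { *g = retryGate{} }

func main() {
	var g retryGate
	now := time.Now()
	// Simulate MountDevice failing while the CSI driver is unregistered.
	for attempt := 1; attempt <= 3; attempt++ {
		for !g.allowed(now) {
			now = g.notBefore.Add(time.Millisecond) // wait out the window
		}
		g.fail(now)
		fmt.Printf("attempt %d failed; no retries permitted until %s (durationBeforeRetry %s)\n",
			attempt, g.notBefore.Format("15:04:05.000"), g.notBefore.Sub(now))
	}
	g.succeed() // driver registered; mount succeeds and the backoff resets
	fmt.Println("driver registered; gate reset")
}

Under a scheme like this, the repeated mount failures seen in this window cost a few short backoff intervals rather than a tight retry loop, and the same volume can mount promptly once csi-hostpathplugin registers. The log resumes below.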
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.363728 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.378228 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.397969 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.418317 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.433439 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 11:10:02 crc kubenswrapper[4728]: E1205 11:10:02.433653 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:02.933622758 +0000 UTC m=+137.075745471 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.433751 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/282b6a91-2678-4bc8-8577-a5fdcc7e2f9b-config\") pod \"service-ca-operator-777779d784-8z7kn\" (UID: \"282b6a91-2678-4bc8-8577-a5fdcc7e2f9b\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-8z7kn"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.433857 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xp6s5\" (UniqueName: \"kubernetes.io/projected/26d19364-b381-49b6-bf41-9cfe831484f1-kube-api-access-xp6s5\") pod \"packageserver-d55dfcdfc-vh9hw\" (UID: \"26d19364-b381-49b6-bf41-9cfe831484f1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vh9hw"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.433981 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/43ab850c-2ecf-49b9-b0ff-9f49befb53c3-etcd-client\") pod \"etcd-operator-b45778765-zrtl6\" (UID: \"43ab850c-2ecf-49b9-b0ff-9f49befb53c3\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zrtl6"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.434695 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/43ab850c-2ecf-49b9-b0ff-9f49befb53c3-etcd-service-ca\") pod \"etcd-operator-b45778765-zrtl6\" (UID: \"43ab850c-2ecf-49b9-b0ff-9f49befb53c3\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zrtl6"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.434773 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/e0939ff9-1920-4663-84d4-4cef2e8e3588-certs\") pod \"machine-config-server-jr65f\" (UID: \"e0939ff9-1920-4663-84d4-4cef2e8e3588\") " pod="openshift-machine-config-operator/machine-config-server-jr65f"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.434897 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f7r2s\" (UniqueName: \"kubernetes.io/projected/af6fc955-4b84-4c02-a7b0-a3272f9fbf61-kube-api-access-f7r2s\") pod \"ingress-canary-kwdb8\" (UID: \"af6fc955-4b84-4c02-a7b0-a3272f9fbf61\") " pod="openshift-ingress-canary/ingress-canary-kwdb8"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.435038 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/f4d671a2-6454-4bf6-a099-0c0e15de2f20-images\") pod \"machine-api-operator-5694c8668f-vllpv\" (UID: \"f4d671a2-6454-4bf6-a099-0c0e15de2f20\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vllpv"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.435881 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c63f5e7e-bf41-4bea-bfdb-fca1914f4a5d-config-volume\") pod \"dns-default-gd7w5\" (UID: \"c63f5e7e-bf41-4bea-bfdb-fca1914f4a5d\") " pod="openshift-dns/dns-default-gd7w5"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.436124 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8f6033c2-7f61-434b-a2c1-e58530ab4196-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-c2jd4\" (UID: \"8f6033c2-7f61-434b-a2c1-e58530ab4196\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-c2jd4"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.436205 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bwlsw\" (UniqueName: \"kubernetes.io/projected/aec2314f-9290-4eb4-a632-70baf826e29a-kube-api-access-bwlsw\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.436271 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/43ab850c-2ecf-49b9-b0ff-9f49befb53c3-config\") pod \"etcd-operator-b45778765-zrtl6\" (UID: \"43ab850c-2ecf-49b9-b0ff-9f49befb53c3\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zrtl6"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.436320 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/e0939ff9-1920-4663-84d4-4cef2e8e3588-node-bootstrap-token\") pod \"machine-config-server-jr65f\" (UID: \"e0939ff9-1920-4663-84d4-4cef2e8e3588\") " pod="openshift-machine-config-operator/machine-config-server-jr65f"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.436351 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/40a0ccb9-a83b-4836-8864-5687d054d330-auth-proxy-config\") pod \"machine-approver-56656f9798-pq24q\" (UID: \"40a0ccb9-a83b-4836-8864-5687d054d330\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pq24q"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.436406 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/aec2314f-9290-4eb4-a632-70baf826e29a-installation-pull-secrets\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.436439 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-84wsf\" (UniqueName: \"kubernetes.io/projected/8d5bba3b-8ec6-4325-80d2-0d3c24b10987-kube-api-access-84wsf\") pod \"machine-config-operator-74547568cd-g6zpg\" (UID: \"8d5bba3b-8ec6-4325-80d2-0d3c24b10987\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-g6zpg"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.436470 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/977887c0-1f95-4b49-ac6e-34d90aa8d305-profile-collector-cert\") pod \"olm-operator-6b444d44fb-7xxhg\" (UID: \"977887c0-1f95-4b49-ac6e-34d90aa8d305\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7xxhg"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.436519 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cjh4d\" (UniqueName: \"kubernetes.io/projected/ad2b0cc2-0295-417f-8122-0db0f8f71400-kube-api-access-cjh4d\") pod \"migrator-59844c95c7-xszbp\" (UID: \"ad2b0cc2-0295-417f-8122-0db0f8f71400\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-xszbp"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.436620 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/8d5bba3b-8ec6-4325-80d2-0d3c24b10987-proxy-tls\") pod \"machine-config-operator-74547568cd-g6zpg\" (UID: \"8d5bba3b-8ec6-4325-80d2-0d3c24b10987\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-g6zpg"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.436653 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/aec2314f-9290-4eb4-a632-70baf826e29a-bound-sa-token\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.436686 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w268p\" (UniqueName: \"kubernetes.io/projected/e0939ff9-1920-4663-84d4-4cef2e8e3588-kube-api-access-w268p\") pod \"machine-config-server-jr65f\" (UID: \"e0939ff9-1920-4663-84d4-4cef2e8e3588\") " pod="openshift-machine-config-operator/machine-config-server-jr65f"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.436314 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/f4d671a2-6454-4bf6-a099-0c0e15de2f20-images\") pod \"machine-api-operator-5694c8668f-vllpv\" (UID: \"f4d671a2-6454-4bf6-a099-0c0e15de2f20\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vllpv"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.436897 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.437137 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7f4d6517-ec6f-44c7-bd21-b74438d2b456-serving-cert\") pod \"console-operator-58897d9998-76cfh\" (UID: \"7f4d6517-ec6f-44c7-bd21-b74438d2b456\") " pod="openshift-console-operator/console-operator-58897d9998-76cfh"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.437409 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/51182eb7-b5c6-4108-95cb-c7835d473ae1-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-7fhtn\" (UID: \"51182eb7-b5c6-4108-95cb-c7835d473ae1\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7fhtn"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.437464 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5gj7b\" (UniqueName: \"kubernetes.io/projected/43ab850c-2ecf-49b9-b0ff-9f49befb53c3-kube-api-access-5gj7b\") pod \"etcd-operator-b45778765-zrtl6\" (UID: \"43ab850c-2ecf-49b9-b0ff-9f49befb53c3\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zrtl6"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.437515 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/af6fc955-4b84-4c02-a7b0-a3272f9fbf61-cert\") pod \"ingress-canary-kwdb8\" (UID: \"af6fc955-4b84-4c02-a7b0-a3272f9fbf61\") " pod="openshift-ingress-canary/ingress-canary-kwdb8"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.437550 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/26d19364-b381-49b6-bf41-9cfe831484f1-apiservice-cert\") pod \"packageserver-d55dfcdfc-vh9hw\" (UID: \"26d19364-b381-49b6-bf41-9cfe831484f1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vh9hw"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.437694 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/985a876e-5d4e-4904-85c6-f10945d269cd-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-q4v9s\" (UID: \"985a876e-5d4e-4904-85c6-f10945d269cd\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-q4v9s"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.437754 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a3b08810-f132-4eae-99fa-5a68c197e52b-metrics-tls\") pod \"dns-operator-744455d44c-lt4gz\" (UID: \"a3b08810-f132-4eae-99fa-5a68c197e52b\") " pod="openshift-dns-operator/dns-operator-744455d44c-lt4gz"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.437855 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4nb8w\" (UniqueName: \"kubernetes.io/projected/51182eb7-b5c6-4108-95cb-c7835d473ae1-kube-api-access-4nb8w\") pod \"package-server-manager-789f6589d5-7fhtn\" (UID: \"51182eb7-b5c6-4108-95cb-c7835d473ae1\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7fhtn"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.437935 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/86cfa1e7-7206-404d-bc2d-bb34f50980ef-secret-volume\") pod \"collect-profiles-29415540-x94v5\" (UID: \"86cfa1e7-7206-404d-bc2d-bb34f50980ef\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415540-x94v5"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.437974 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/43ab850c-2ecf-49b9-b0ff-9f49befb53c3-serving-cert\") pod \"etcd-operator-b45778765-zrtl6\" (UID: \"43ab850c-2ecf-49b9-b0ff-9f49befb53c3\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zrtl6"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.438036 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ac5d7238-f656-4446-9620-49a1ea4f677f-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-nvfz5\" (UID: \"ac5d7238-f656-4446-9620-49a1ea4f677f\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nvfz5"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.438069 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac5d7238-f656-4446-9620-49a1ea4f677f-config\") pod \"kube-apiserver-operator-766d6c64bb-nvfz5\" (UID: \"ac5d7238-f656-4446-9620-49a1ea4f677f\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nvfz5"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.438103 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/1e090228-7348-4896-8ef7-a14137325478-srv-cert\") pod \"catalog-operator-68c6474976-424hr\" (UID: \"1e090228-7348-4896-8ef7-a14137325478\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-424hr"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.438172 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ac5d7238-f656-4446-9620-49a1ea4f677f-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-nvfz5\" (UID: \"ac5d7238-f656-4446-9620-49a1ea4f677f\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nvfz5"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.438206 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e00af11a-2662-4b44-9140-5c8d3b5f2834-bound-sa-token\") pod \"ingress-operator-5b745b69d9-hqq2x\" (UID: \"e00af11a-2662-4b44-9140-5c8d3b5f2834\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hqq2x"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.438240 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vwb8n\" (UniqueName: \"kubernetes.io/projected/bf109eee-6f22-421a-bcca-c9eda9726830-kube-api-access-vwb8n\") pod \"service-ca-9c57cc56f-gr77k\" (UID: \"bf109eee-6f22-421a-bcca-c9eda9726830\") " pod="openshift-service-ca/service-ca-9c57cc56f-gr77k"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.438274 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/4b1a43d6-1fb2-48a3-acfe-e656d1dbeff5-mountpoint-dir\") pod \"csi-hostpathplugin-ppfpq\" (UID: \"4b1a43d6-1fb2-48a3-acfe-e656d1dbeff5\") " pod="hostpath-provisioner/csi-hostpathplugin-ppfpq"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.438420 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lp7jv\" (UniqueName: \"kubernetes.io/projected/709fe194-7202-4841-a79c-1bd440f108d2-kube-api-access-lp7jv\") pod \"multus-admission-controller-857f4d67dd-4r2zd\" (UID: \"709fe194-7202-4841-a79c-1bd440f108d2\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-4r2zd"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.438453 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-frsxz\" (UniqueName: \"kubernetes.io/projected/8f6033c2-7f61-434b-a2c1-e58530ab4196-kube-api-access-frsxz\") pod \"kube-storage-version-migrator-operator-b67b599dd-c2jd4\" (UID: \"8f6033c2-7f61-434b-a2c1-e58530ab4196\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-c2jd4"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.438503 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bpcsw\" (UniqueName: \"kubernetes.io/projected/4b1a43d6-1fb2-48a3-acfe-e656d1dbeff5-kube-api-access-bpcsw\") pod \"csi-hostpathplugin-ppfpq\" (UID: \"4b1a43d6-1fb2-48a3-acfe-e656d1dbeff5\") " pod="hostpath-provisioner/csi-hostpathplugin-ppfpq"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.438552 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/f1ff90d9-6a24-416a-a94b-64160dc3d1e2-proxy-tls\") pod \"machine-config-controller-84d6567774-p75qw\" (UID: \"f1ff90d9-6a24-416a-a94b-64160dc3d1e2\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-p75qw"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.438608 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4d671a2-6454-4bf6-a099-0c0e15de2f20-config\") pod \"machine-api-operator-5694c8668f-vllpv\" (UID: \"f4d671a2-6454-4bf6-a099-0c0e15de2f20\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vllpv"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.438642 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dzkgc\" (UniqueName: \"kubernetes.io/projected/f4d671a2-6454-4bf6-a099-0c0e15de2f20-kube-api-access-dzkgc\") pod \"machine-api-operator-5694c8668f-vllpv\" (UID: \"f4d671a2-6454-4bf6-a099-0c0e15de2f20\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vllpv"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.438735 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac5d7238-f656-4446-9620-49a1ea4f677f-config\") pod \"kube-apiserver-operator-766d6c64bb-nvfz5\" (UID: \"ac5d7238-f656-4446-9620-49a1ea4f677f\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nvfz5"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.438812 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/1e090228-7348-4896-8ef7-a14137325478-profile-collector-cert\") pod \"catalog-operator-68c6474976-424hr\" (UID: \"1e090228-7348-4896-8ef7-a14137325478\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-424hr"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.438842 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/977887c0-1f95-4b49-ac6e-34d90aa8d305-srv-cert\") pod \"olm-operator-6b444d44fb-7xxhg\" (UID: \"977887c0-1f95-4b49-ac6e-34d90aa8d305\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7xxhg"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.438960 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/e00af11a-2662-4b44-9140-5c8d3b5f2834-metrics-tls\") pod \"ingress-operator-5b745b69d9-hqq2x\" (UID: \"e00af11a-2662-4b44-9140-5c8d3b5f2834\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hqq2x"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.439106 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/26d19364-b381-49b6-bf41-9cfe831484f1-tmpfs\") pod \"packageserver-d55dfcdfc-vh9hw\" (UID: \"26d19364-b381-49b6-bf41-9cfe831484f1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vh9hw"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.439129 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8f6033c2-7f61-434b-a2c1-e58530ab4196-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-c2jd4\" (UID: \"8f6033c2-7f61-434b-a2c1-e58530ab4196\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-c2jd4"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.439151 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/985a876e-5d4e-4904-85c6-f10945d269cd-config\") pod \"kube-controller-manager-operator-78b949d7b-q4v9s\" (UID: \"985a876e-5d4e-4904-85c6-f10945d269cd\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-q4v9s"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.439236 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0cbb9d62-be02-4410-bdc2-c0179576d8ed-service-ca-bundle\") pod \"router-default-5444994796-c6s8n\" (UID: \"0cbb9d62-be02-4410-bdc2-c0179576d8ed\") " pod="openshift-ingress/router-default-5444994796-c6s8n"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.439304 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/40a0ccb9-a83b-4836-8864-5687d054d330-machine-approver-tls\") pod \"machine-approver-56656f9798-pq24q\" (UID: \"40a0ccb9-a83b-4836-8864-5687d054d330\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pq24q"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.439357 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/bf109eee-6f22-421a-bcca-c9eda9726830-signing-cabundle\") pod \"service-ca-9c57cc56f-gr77k\" (UID: \"bf109eee-6f22-421a-bcca-c9eda9726830\") " pod="openshift-service-ca/service-ca-9c57cc56f-gr77k"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.439386 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8kc96\" (UniqueName: \"kubernetes.io/projected/7f4d6517-ec6f-44c7-bd21-b74438d2b456-kube-api-access-8kc96\") pod \"console-operator-58897d9998-76cfh\" (UID: \"7f4d6517-ec6f-44c7-bd21-b74438d2b456\") " pod="openshift-console-operator/console-operator-58897d9998-76cfh"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.439413 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/0cbb9d62-be02-4410-bdc2-c0179576d8ed-stats-auth\") pod \"router-default-5444994796-c6s8n\" (UID: \"0cbb9d62-be02-4410-bdc2-c0179576d8ed\") " pod="openshift-ingress/router-default-5444994796-c6s8n"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.439643 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f1ff90d9-6a24-416a-a94b-64160dc3d1e2-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-p75qw\" (UID: \"f1ff90d9-6a24-416a-a94b-64160dc3d1e2\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-p75qw"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.439895 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c46lg\" (UniqueName: \"kubernetes.io/projected/e00af11a-2662-4b44-9140-5c8d3b5f2834-kube-api-access-c46lg\") pod \"ingress-operator-5b745b69d9-hqq2x\" (UID: \"e00af11a-2662-4b44-9140-5c8d3b5f2834\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hqq2x"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.440343 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/c63f5e7e-bf41-4bea-bfdb-fca1914f4a5d-metrics-tls\") pod \"dns-default-gd7w5\" (UID: \"c63f5e7e-bf41-4bea-bfdb-fca1914f4a5d\") " pod="openshift-dns/dns-default-gd7w5"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.440429 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0cbb9d62-be02-4410-bdc2-c0179576d8ed-metrics-certs\") pod \"router-default-5444994796-c6s8n\" (UID: \"0cbb9d62-be02-4410-bdc2-c0179576d8ed\") " pod="openshift-ingress/router-default-5444994796-c6s8n"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.440557 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/99f17d34-cfff-4706-af23-04fff3d500bd-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-5x4xq\" (UID: \"99f17d34-cfff-4706-af23-04fff3d500bd\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5x4xq"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.440865 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1ce2658a-0a84-40c6-8e42-e83736811aa1-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-f7jqh\" (UID: \"1ce2658a-0a84-40c6-8e42-e83736811aa1\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-f7jqh"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.440930 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/f4d671a2-6454-4bf6-a099-0c0e15de2f20-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-vllpv\" (UID: \"f4d671a2-6454-4bf6-a099-0c0e15de2f20\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vllpv"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.440985 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1ce2658a-0a84-40c6-8e42-e83736811aa1-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-f7jqh\" (UID: \"1ce2658a-0a84-40c6-8e42-e83736811aa1\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-f7jqh"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.441064 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/26d19364-b381-49b6-bf41-9cfe831484f1-webhook-cert\") pod \"packageserver-d55dfcdfc-vh9hw\" (UID: \"26d19364-b381-49b6-bf41-9cfe831484f1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vh9hw"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.441110 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jrjkq\" (UniqueName: \"kubernetes.io/projected/8c8f3b0b-edad-4a06-8374-2d2d3cd805d8-kube-api-access-jrjkq\") pod \"marketplace-operator-79b997595-2clxj\" (UID: \"8c8f3b0b-edad-4a06-8374-2d2d3cd805d8\") " pod="openshift-marketplace/marketplace-operator-79b997595-2clxj"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.441159 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/282b6a91-2678-4bc8-8577-a5fdcc7e2f9b-serving-cert\") pod \"service-ca-operator-777779d784-8z7kn\" (UID: \"282b6a91-2678-4bc8-8577-a5fdcc7e2f9b\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-8z7kn"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.441186 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4d671a2-6454-4bf6-a099-0c0e15de2f20-config\") pod \"machine-api-operator-5694c8668f-vllpv\" (UID: \"f4d671a2-6454-4bf6-a099-0c0e15de2f20\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vllpv"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.441204 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/4b1a43d6-1fb2-48a3-acfe-e656d1dbeff5-socket-dir\") pod \"csi-hostpathplugin-ppfpq\" (UID: \"4b1a43d6-1fb2-48a3-acfe-e656d1dbeff5\") " pod="hostpath-provisioner/csi-hostpathplugin-ppfpq"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.441294 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hkfk8\" (UniqueName: \"kubernetes.io/projected/f1ff90d9-6a24-416a-a94b-64160dc3d1e2-kube-api-access-hkfk8\") pod \"machine-config-controller-84d6567774-p75qw\" (UID: \"f1ff90d9-6a24-416a-a94b-64160dc3d1e2\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-p75qw"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.441353 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/4b1a43d6-1fb2-48a3-acfe-e656d1dbeff5-csi-data-dir\") pod \"csi-hostpathplugin-ppfpq\" (UID: \"4b1a43d6-1fb2-48a3-acfe-e656d1dbeff5\") " pod="hostpath-provisioner/csi-hostpathplugin-ppfpq"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.441394 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/8d5bba3b-8ec6-4325-80d2-0d3c24b10987-auth-proxy-config\") pod \"machine-config-operator-74547568cd-g6zpg\" (UID: \"8d5bba3b-8ec6-4325-80d2-0d3c24b10987\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-g6zpg"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.441451 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4wdx5\" (UniqueName: \"kubernetes.io/projected/282b6a91-2678-4bc8-8577-a5fdcc7e2f9b-kube-api-access-4wdx5\") pod \"service-ca-operator-777779d784-8z7kn\" (UID: \"282b6a91-2678-4bc8-8577-a5fdcc7e2f9b\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-8z7kn"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.441499 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/aec2314f-9290-4eb4-a632-70baf826e29a-trusted-ca\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.441523 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1ce2658a-0a84-40c6-8e42-e83736811aa1-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-f7jqh\" (UID: \"1ce2658a-0a84-40c6-8e42-e83736811aa1\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-f7jqh"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.441589 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/aec2314f-9290-4eb4-a632-70baf826e29a-registry-certificates\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.441617 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/4b1a43d6-1fb2-48a3-acfe-e656d1dbeff5-registration-dir\") pod \"csi-hostpathplugin-ppfpq\" (UID: \"4b1a43d6-1fb2-48a3-acfe-e656d1dbeff5\") " pod="hostpath-provisioner/csi-hostpathplugin-ppfpq"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.441677 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/aec2314f-9290-4eb4-a632-70baf826e29a-registry-tls\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.441702 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/86cfa1e7-7206-404d-bc2d-bb34f50980ef-config-volume\") pod \"collect-profiles-29415540-x94v5\" (UID: \"86cfa1e7-7206-404d-bc2d-bb34f50980ef\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415540-x94v5"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.441723 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g5d5g\" (UniqueName: \"kubernetes.io/projected/86cfa1e7-7206-404d-bc2d-bb34f50980ef-kube-api-access-g5d5g\") pod \"collect-profiles-29415540-x94v5\" (UID: \"86cfa1e7-7206-404d-bc2d-bb34f50980ef\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415540-x94v5"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.441752 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/4b1a43d6-1fb2-48a3-acfe-e656d1dbeff5-plugins-dir\") pod \"csi-hostpathplugin-ppfpq\" (UID: \"4b1a43d6-1fb2-48a3-acfe-e656d1dbeff5\") " pod="hostpath-provisioner/csi-hostpathplugin-ppfpq"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.441780 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-br7g9\" (UniqueName: \"kubernetes.io/projected/99f17d34-cfff-4706-af23-04fff3d500bd-kube-api-access-br7g9\") pod \"control-plane-machine-set-operator-78cbb6b69f-5x4xq\" (UID: \"99f17d34-cfff-4706-af23-04fff3d500bd\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5x4xq"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.441814 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vffhd\" (UniqueName: \"kubernetes.io/projected/1e090228-7348-4896-8ef7-a14137325478-kube-api-access-vffhd\") pod \"catalog-operator-68c6474976-424hr\" (UID: \"1e090228-7348-4896-8ef7-a14137325478\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-424hr"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.441835 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ac5d7238-f656-4446-9620-49a1ea4f677f-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-nvfz5\" (UID: \"ac5d7238-f656-4446-9620-49a1ea4f677f\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nvfz5"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.442708 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/8d5bba3b-8ec6-4325-80d2-0d3c24b10987-images\") pod \"machine-config-operator-74547568cd-g6zpg\" (UID: \"8d5bba3b-8ec6-4325-80d2-0d3c24b10987\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-g6zpg"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.442749 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/709fe194-7202-4841-a79c-1bd440f108d2-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-4r2zd\" (UID: \"709fe194-7202-4841-a79c-1bd440f108d2\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-4r2zd"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.442775 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5qx9n\" (UniqueName: \"kubernetes.io/projected/977887c0-1f95-4b49-ac6e-34d90aa8d305-kube-api-access-5qx9n\") pod \"olm-operator-6b444d44fb-7xxhg\" (UID: \"977887c0-1f95-4b49-ac6e-34d90aa8d305\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7xxhg"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.442902 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7f4d6517-ec6f-44c7-bd21-b74438d2b456-trusted-ca\") pod \"console-operator-58897d9998-76cfh\" (UID: \"7f4d6517-ec6f-44c7-bd21-b74438d2b456\") " pod="openshift-console-operator/console-operator-58897d9998-76cfh"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.442925 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/8c8f3b0b-edad-4a06-8374-2d2d3cd805d8-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-2clxj\" (UID: \"8c8f3b0b-edad-4a06-8374-2d2d3cd805d8\") " pod="openshift-marketplace/marketplace-operator-79b997595-2clxj"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.443528 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/aec2314f-9290-4eb4-a632-70baf826e29a-registry-certificates\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.443533 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7f4d6517-ec6f-44c7-bd21-b74438d2b456-config\") pod \"console-operator-58897d9998-76cfh\" (UID: \"7f4d6517-ec6f-44c7-bd21-b74438d2b456\") " pod="openshift-console-operator/console-operator-58897d9998-76cfh"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.443620 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8c8f3b0b-edad-4a06-8374-2d2d3cd805d8-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-2clxj\" (UID: \"8c8f3b0b-edad-4a06-8374-2d2d3cd805d8\") " pod="openshift-marketplace/marketplace-operator-79b997595-2clxj"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.444262 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e00af11a-2662-4b44-9140-5c8d3b5f2834-trusted-ca\") pod \"ingress-operator-5b745b69d9-hqq2x\" (UID: \"e00af11a-2662-4b44-9140-5c8d3b5f2834\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hqq2x"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.444302 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/0cbb9d62-be02-4410-bdc2-c0179576d8ed-default-certificate\") pod \"router-default-5444994796-c6s8n\" (UID: \"0cbb9d62-be02-4410-bdc2-c0179576d8ed\") " pod="openshift-ingress/router-default-5444994796-c6s8n"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.444327 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/985a876e-5d4e-4904-85c6-f10945d269cd-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-q4v9s\" (UID: \"985a876e-5d4e-4904-85c6-f10945d269cd\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-q4v9s"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.444398 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/aec2314f-9290-4eb4-a632-70baf826e29a-ca-trust-extracted\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.444435 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/bf109eee-6f22-421a-bcca-c9eda9726830-signing-key\") pod \"service-ca-9c57cc56f-gr77k\" (UID: \"bf109eee-6f22-421a-bcca-c9eda9726830\") " pod="openshift-service-ca/service-ca-9c57cc56f-gr77k"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.444458 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hxnfq\" (UniqueName: \"kubernetes.io/projected/40a0ccb9-a83b-4836-8864-5687d054d330-kube-api-access-hxnfq\") pod \"machine-approver-56656f9798-pq24q\" (UID: \"40a0ccb9-a83b-4836-8864-5687d054d330\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pq24q"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.444513 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fj8ph\" (UniqueName: \"kubernetes.io/projected/0cbb9d62-be02-4410-bdc2-c0179576d8ed-kube-api-access-fj8ph\") pod \"router-default-5444994796-c6s8n\" (UID: \"0cbb9d62-be02-4410-bdc2-c0179576d8ed\") " pod="openshift-ingress/router-default-5444994796-c6s8n"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.444542 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hz44b\" (UniqueName: \"kubernetes.io/projected/c63f5e7e-bf41-4bea-bfdb-fca1914f4a5d-kube-api-access-hz44b\") pod \"dns-default-gd7w5\" (UID: \"c63f5e7e-bf41-4bea-bfdb-fca1914f4a5d\") " pod="openshift-dns/dns-default-gd7w5"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.444573 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/43ab850c-2ecf-49b9-b0ff-9f49befb53c3-etcd-ca\") pod \"etcd-operator-b45778765-zrtl6\" (UID: \"43ab850c-2ecf-49b9-b0ff-9f49befb53c3\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zrtl6"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.444697 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.444816 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/40a0ccb9-a83b-4836-8864-5687d054d330-config\") pod \"machine-approver-56656f9798-pq24q\" (UID: \"40a0ccb9-a83b-4836-8864-5687d054d330\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pq24q"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.444886 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/aec2314f-9290-4eb4-a632-70baf826e29a-ca-trust-extracted\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8"
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.444918 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rgqtf\" (UniqueName: \"kubernetes.io/projected/a3b08810-f132-4eae-99fa-5a68c197e52b-kube-api-access-rgqtf\") pod \"dns-operator-744455d44c-lt4gz\" (UID: \"a3b08810-f132-4eae-99fa-5a68c197e52b\") " pod="openshift-dns-operator/dns-operator-744455d44c-lt4gz"
Dec 05 11:10:02 crc kubenswrapper[4728]: E1205 11:10:02.445056 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:02.945043339 +0000 UTC m=+137.087166042 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.446140 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/f4d671a2-6454-4bf6-a099-0c0e15de2f20-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-vllpv\" (UID: \"f4d671a2-6454-4bf6-a099-0c0e15de2f20\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vllpv" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.447090 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/aec2314f-9290-4eb4-a632-70baf826e29a-registry-tls\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.447565 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/aec2314f-9290-4eb4-a632-70baf826e29a-trusted-ca\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.447721 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/aec2314f-9290-4eb4-a632-70baf826e29a-installation-pull-secrets\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.457903 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.478080 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.498210 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.517958 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.537441 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.546514 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.546691 4728 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-4wdx5\" (UniqueName: \"kubernetes.io/projected/282b6a91-2678-4bc8-8577-a5fdcc7e2f9b-kube-api-access-4wdx5\") pod \"service-ca-operator-777779d784-8z7kn\" (UID: \"282b6a91-2678-4bc8-8577-a5fdcc7e2f9b\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-8z7kn" Dec 05 11:10:02 crc kubenswrapper[4728]: E1205 11:10:02.546992 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:03.046962923 +0000 UTC m=+137.189085616 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.547073 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1ce2658a-0a84-40c6-8e42-e83736811aa1-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-f7jqh\" (UID: \"1ce2658a-0a84-40c6-8e42-e83736811aa1\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-f7jqh" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.547124 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/4b1a43d6-1fb2-48a3-acfe-e656d1dbeff5-registration-dir\") pod \"csi-hostpathplugin-ppfpq\" (UID: \"4b1a43d6-1fb2-48a3-acfe-e656d1dbeff5\") " pod="hostpath-provisioner/csi-hostpathplugin-ppfpq" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.547145 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g5d5g\" (UniqueName: \"kubernetes.io/projected/86cfa1e7-7206-404d-bc2d-bb34f50980ef-kube-api-access-g5d5g\") pod \"collect-profiles-29415540-x94v5\" (UID: \"86cfa1e7-7206-404d-bc2d-bb34f50980ef\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415540-x94v5" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.547164 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/86cfa1e7-7206-404d-bc2d-bb34f50980ef-config-volume\") pod \"collect-profiles-29415540-x94v5\" (UID: \"86cfa1e7-7206-404d-bc2d-bb34f50980ef\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415540-x94v5" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.547194 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/4b1a43d6-1fb2-48a3-acfe-e656d1dbeff5-plugins-dir\") pod \"csi-hostpathplugin-ppfpq\" (UID: \"4b1a43d6-1fb2-48a3-acfe-e656d1dbeff5\") " pod="hostpath-provisioner/csi-hostpathplugin-ppfpq" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.547211 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-br7g9\" (UniqueName: 
\"kubernetes.io/projected/99f17d34-cfff-4706-af23-04fff3d500bd-kube-api-access-br7g9\") pod \"control-plane-machine-set-operator-78cbb6b69f-5x4xq\" (UID: \"99f17d34-cfff-4706-af23-04fff3d500bd\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5x4xq" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.547231 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vffhd\" (UniqueName: \"kubernetes.io/projected/1e090228-7348-4896-8ef7-a14137325478-kube-api-access-vffhd\") pod \"catalog-operator-68c6474976-424hr\" (UID: \"1e090228-7348-4896-8ef7-a14137325478\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-424hr" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.547247 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/8d5bba3b-8ec6-4325-80d2-0d3c24b10987-images\") pod \"machine-config-operator-74547568cd-g6zpg\" (UID: \"8d5bba3b-8ec6-4325-80d2-0d3c24b10987\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-g6zpg" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.547267 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/709fe194-7202-4841-a79c-1bd440f108d2-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-4r2zd\" (UID: \"709fe194-7202-4841-a79c-1bd440f108d2\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-4r2zd" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.547289 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5qx9n\" (UniqueName: \"kubernetes.io/projected/977887c0-1f95-4b49-ac6e-34d90aa8d305-kube-api-access-5qx9n\") pod \"olm-operator-6b444d44fb-7xxhg\" (UID: \"977887c0-1f95-4b49-ac6e-34d90aa8d305\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7xxhg" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.547315 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7f4d6517-ec6f-44c7-bd21-b74438d2b456-trusted-ca\") pod \"console-operator-58897d9998-76cfh\" (UID: \"7f4d6517-ec6f-44c7-bd21-b74438d2b456\") " pod="openshift-console-operator/console-operator-58897d9998-76cfh" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.547330 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/8c8f3b0b-edad-4a06-8374-2d2d3cd805d8-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-2clxj\" (UID: \"8c8f3b0b-edad-4a06-8374-2d2d3cd805d8\") " pod="openshift-marketplace/marketplace-operator-79b997595-2clxj" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.547359 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7f4d6517-ec6f-44c7-bd21-b74438d2b456-config\") pod \"console-operator-58897d9998-76cfh\" (UID: \"7f4d6517-ec6f-44c7-bd21-b74438d2b456\") " pod="openshift-console-operator/console-operator-58897d9998-76cfh" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.547378 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8c8f3b0b-edad-4a06-8374-2d2d3cd805d8-marketplace-trusted-ca\") pod 
\"marketplace-operator-79b997595-2clxj\" (UID: \"8c8f3b0b-edad-4a06-8374-2d2d3cd805d8\") " pod="openshift-marketplace/marketplace-operator-79b997595-2clxj" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.547401 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e00af11a-2662-4b44-9140-5c8d3b5f2834-trusted-ca\") pod \"ingress-operator-5b745b69d9-hqq2x\" (UID: \"e00af11a-2662-4b44-9140-5c8d3b5f2834\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hqq2x" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.547427 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/0cbb9d62-be02-4410-bdc2-c0179576d8ed-default-certificate\") pod \"router-default-5444994796-c6s8n\" (UID: \"0cbb9d62-be02-4410-bdc2-c0179576d8ed\") " pod="openshift-ingress/router-default-5444994796-c6s8n" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.547447 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/bf109eee-6f22-421a-bcca-c9eda9726830-signing-key\") pod \"service-ca-9c57cc56f-gr77k\" (UID: \"bf109eee-6f22-421a-bcca-c9eda9726830\") " pod="openshift-service-ca/service-ca-9c57cc56f-gr77k" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.547452 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/4b1a43d6-1fb2-48a3-acfe-e656d1dbeff5-plugins-dir\") pod \"csi-hostpathplugin-ppfpq\" (UID: \"4b1a43d6-1fb2-48a3-acfe-e656d1dbeff5\") " pod="hostpath-provisioner/csi-hostpathplugin-ppfpq" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.547480 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/4b1a43d6-1fb2-48a3-acfe-e656d1dbeff5-registration-dir\") pod \"csi-hostpathplugin-ppfpq\" (UID: \"4b1a43d6-1fb2-48a3-acfe-e656d1dbeff5\") " pod="hostpath-provisioner/csi-hostpathplugin-ppfpq" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.547464 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/985a876e-5d4e-4904-85c6-f10945d269cd-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-q4v9s\" (UID: \"985a876e-5d4e-4904-85c6-f10945d269cd\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-q4v9s" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.547563 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/43ab850c-2ecf-49b9-b0ff-9f49befb53c3-etcd-ca\") pod \"etcd-operator-b45778765-zrtl6\" (UID: \"43ab850c-2ecf-49b9-b0ff-9f49befb53c3\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zrtl6" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.547589 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hxnfq\" (UniqueName: \"kubernetes.io/projected/40a0ccb9-a83b-4836-8864-5687d054d330-kube-api-access-hxnfq\") pod \"machine-approver-56656f9798-pq24q\" (UID: \"40a0ccb9-a83b-4836-8864-5687d054d330\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pq24q" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.547616 4728 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"kube-api-access-fj8ph\" (UniqueName: \"kubernetes.io/projected/0cbb9d62-be02-4410-bdc2-c0179576d8ed-kube-api-access-fj8ph\") pod \"router-default-5444994796-c6s8n\" (UID: \"0cbb9d62-be02-4410-bdc2-c0179576d8ed\") " pod="openshift-ingress/router-default-5444994796-c6s8n" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.547639 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hz44b\" (UniqueName: \"kubernetes.io/projected/c63f5e7e-bf41-4bea-bfdb-fca1914f4a5d-kube-api-access-hz44b\") pod \"dns-default-gd7w5\" (UID: \"c63f5e7e-bf41-4bea-bfdb-fca1914f4a5d\") " pod="openshift-dns/dns-default-gd7w5" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.547684 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.547706 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/40a0ccb9-a83b-4836-8864-5687d054d330-config\") pod \"machine-approver-56656f9798-pq24q\" (UID: \"40a0ccb9-a83b-4836-8864-5687d054d330\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pq24q" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.547729 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rgqtf\" (UniqueName: \"kubernetes.io/projected/a3b08810-f132-4eae-99fa-5a68c197e52b-kube-api-access-rgqtf\") pod \"dns-operator-744455d44c-lt4gz\" (UID: \"a3b08810-f132-4eae-99fa-5a68c197e52b\") " pod="openshift-dns-operator/dns-operator-744455d44c-lt4gz" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.547751 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/282b6a91-2678-4bc8-8577-a5fdcc7e2f9b-config\") pod \"service-ca-operator-777779d784-8z7kn\" (UID: \"282b6a91-2678-4bc8-8577-a5fdcc7e2f9b\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-8z7kn" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.547775 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xp6s5\" (UniqueName: \"kubernetes.io/projected/26d19364-b381-49b6-bf41-9cfe831484f1-kube-api-access-xp6s5\") pod \"packageserver-d55dfcdfc-vh9hw\" (UID: \"26d19364-b381-49b6-bf41-9cfe831484f1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vh9hw" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.547825 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/43ab850c-2ecf-49b9-b0ff-9f49befb53c3-etcd-client\") pod \"etcd-operator-b45778765-zrtl6\" (UID: \"43ab850c-2ecf-49b9-b0ff-9f49befb53c3\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zrtl6" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.547848 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/43ab850c-2ecf-49b9-b0ff-9f49befb53c3-etcd-service-ca\") pod \"etcd-operator-b45778765-zrtl6\" (UID: \"43ab850c-2ecf-49b9-b0ff-9f49befb53c3\") " 
pod="openshift-etcd-operator/etcd-operator-b45778765-zrtl6" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.547868 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/e0939ff9-1920-4663-84d4-4cef2e8e3588-certs\") pod \"machine-config-server-jr65f\" (UID: \"e0939ff9-1920-4663-84d4-4cef2e8e3588\") " pod="openshift-machine-config-operator/machine-config-server-jr65f" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.547891 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f7r2s\" (UniqueName: \"kubernetes.io/projected/af6fc955-4b84-4c02-a7b0-a3272f9fbf61-kube-api-access-f7r2s\") pod \"ingress-canary-kwdb8\" (UID: \"af6fc955-4b84-4c02-a7b0-a3272f9fbf61\") " pod="openshift-ingress-canary/ingress-canary-kwdb8" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.547911 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8f6033c2-7f61-434b-a2c1-e58530ab4196-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-c2jd4\" (UID: \"8f6033c2-7f61-434b-a2c1-e58530ab4196\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-c2jd4" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.547935 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c63f5e7e-bf41-4bea-bfdb-fca1914f4a5d-config-volume\") pod \"dns-default-gd7w5\" (UID: \"c63f5e7e-bf41-4bea-bfdb-fca1914f4a5d\") " pod="openshift-dns/dns-default-gd7w5" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.547953 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/43ab850c-2ecf-49b9-b0ff-9f49befb53c3-config\") pod \"etcd-operator-b45778765-zrtl6\" (UID: \"43ab850c-2ecf-49b9-b0ff-9f49befb53c3\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zrtl6" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.547984 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/e0939ff9-1920-4663-84d4-4cef2e8e3588-node-bootstrap-token\") pod \"machine-config-server-jr65f\" (UID: \"e0939ff9-1920-4663-84d4-4cef2e8e3588\") " pod="openshift-machine-config-operator/machine-config-server-jr65f" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.548005 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/977887c0-1f95-4b49-ac6e-34d90aa8d305-profile-collector-cert\") pod \"olm-operator-6b444d44fb-7xxhg\" (UID: \"977887c0-1f95-4b49-ac6e-34d90aa8d305\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7xxhg" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.548029 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/40a0ccb9-a83b-4836-8864-5687d054d330-auth-proxy-config\") pod \"machine-approver-56656f9798-pq24q\" (UID: \"40a0ccb9-a83b-4836-8864-5687d054d330\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pq24q" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.548056 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-84wsf\" (UniqueName: 
\"kubernetes.io/projected/8d5bba3b-8ec6-4325-80d2-0d3c24b10987-kube-api-access-84wsf\") pod \"machine-config-operator-74547568cd-g6zpg\" (UID: \"8d5bba3b-8ec6-4325-80d2-0d3c24b10987\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-g6zpg" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.548102 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cjh4d\" (UniqueName: \"kubernetes.io/projected/ad2b0cc2-0295-417f-8122-0db0f8f71400-kube-api-access-cjh4d\") pod \"migrator-59844c95c7-xszbp\" (UID: \"ad2b0cc2-0295-417f-8122-0db0f8f71400\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-xszbp" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.548128 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/8d5bba3b-8ec6-4325-80d2-0d3c24b10987-proxy-tls\") pod \"machine-config-operator-74547568cd-g6zpg\" (UID: \"8d5bba3b-8ec6-4325-80d2-0d3c24b10987\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-g6zpg" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.548151 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w268p\" (UniqueName: \"kubernetes.io/projected/e0939ff9-1920-4663-84d4-4cef2e8e3588-kube-api-access-w268p\") pod \"machine-config-server-jr65f\" (UID: \"e0939ff9-1920-4663-84d4-4cef2e8e3588\") " pod="openshift-machine-config-operator/machine-config-server-jr65f" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.548185 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5gj7b\" (UniqueName: \"kubernetes.io/projected/43ab850c-2ecf-49b9-b0ff-9f49befb53c3-kube-api-access-5gj7b\") pod \"etcd-operator-b45778765-zrtl6\" (UID: \"43ab850c-2ecf-49b9-b0ff-9f49befb53c3\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zrtl6" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.548207 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7f4d6517-ec6f-44c7-bd21-b74438d2b456-serving-cert\") pod \"console-operator-58897d9998-76cfh\" (UID: \"7f4d6517-ec6f-44c7-bd21-b74438d2b456\") " pod="openshift-console-operator/console-operator-58897d9998-76cfh" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.548232 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/51182eb7-b5c6-4108-95cb-c7835d473ae1-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-7fhtn\" (UID: \"51182eb7-b5c6-4108-95cb-c7835d473ae1\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7fhtn" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.548261 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/af6fc955-4b84-4c02-a7b0-a3272f9fbf61-cert\") pod \"ingress-canary-kwdb8\" (UID: \"af6fc955-4b84-4c02-a7b0-a3272f9fbf61\") " pod="openshift-ingress-canary/ingress-canary-kwdb8" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.548281 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/26d19364-b381-49b6-bf41-9cfe831484f1-apiservice-cert\") pod \"packageserver-d55dfcdfc-vh9hw\" (UID: 
\"26d19364-b381-49b6-bf41-9cfe831484f1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vh9hw" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.548303 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/985a876e-5d4e-4904-85c6-f10945d269cd-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-q4v9s\" (UID: \"985a876e-5d4e-4904-85c6-f10945d269cd\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-q4v9s" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.548327 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4nb8w\" (UniqueName: \"kubernetes.io/projected/51182eb7-b5c6-4108-95cb-c7835d473ae1-kube-api-access-4nb8w\") pod \"package-server-manager-789f6589d5-7fhtn\" (UID: \"51182eb7-b5c6-4108-95cb-c7835d473ae1\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7fhtn" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.548362 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a3b08810-f132-4eae-99fa-5a68c197e52b-metrics-tls\") pod \"dns-operator-744455d44c-lt4gz\" (UID: \"a3b08810-f132-4eae-99fa-5a68c197e52b\") " pod="openshift-dns-operator/dns-operator-744455d44c-lt4gz" Dec 05 11:10:02 crc kubenswrapper[4728]: E1205 11:10:02.548391 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:03.048371357 +0000 UTC m=+137.190494120 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.548459 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/86cfa1e7-7206-404d-bc2d-bb34f50980ef-secret-volume\") pod \"collect-profiles-29415540-x94v5\" (UID: \"86cfa1e7-7206-404d-bc2d-bb34f50980ef\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415540-x94v5" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.548497 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/43ab850c-2ecf-49b9-b0ff-9f49befb53c3-serving-cert\") pod \"etcd-operator-b45778765-zrtl6\" (UID: \"43ab850c-2ecf-49b9-b0ff-9f49befb53c3\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zrtl6" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.548547 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/1e090228-7348-4896-8ef7-a14137325478-srv-cert\") pod \"catalog-operator-68c6474976-424hr\" (UID: \"1e090228-7348-4896-8ef7-a14137325478\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-424hr" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.548592 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e00af11a-2662-4b44-9140-5c8d3b5f2834-bound-sa-token\") pod \"ingress-operator-5b745b69d9-hqq2x\" (UID: \"e00af11a-2662-4b44-9140-5c8d3b5f2834\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hqq2x" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.548623 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vwb8n\" (UniqueName: \"kubernetes.io/projected/bf109eee-6f22-421a-bcca-c9eda9726830-kube-api-access-vwb8n\") pod \"service-ca-9c57cc56f-gr77k\" (UID: \"bf109eee-6f22-421a-bcca-c9eda9726830\") " pod="openshift-service-ca/service-ca-9c57cc56f-gr77k" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.548641 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/4b1a43d6-1fb2-48a3-acfe-e656d1dbeff5-mountpoint-dir\") pod \"csi-hostpathplugin-ppfpq\" (UID: \"4b1a43d6-1fb2-48a3-acfe-e656d1dbeff5\") " pod="hostpath-provisioner/csi-hostpathplugin-ppfpq" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.548669 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7f4d6517-ec6f-44c7-bd21-b74438d2b456-trusted-ca\") pod \"console-operator-58897d9998-76cfh\" (UID: \"7f4d6517-ec6f-44c7-bd21-b74438d2b456\") " pod="openshift-console-operator/console-operator-58897d9998-76cfh" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.548674 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/f1ff90d9-6a24-416a-a94b-64160dc3d1e2-proxy-tls\") pod 
\"machine-config-controller-84d6567774-p75qw\" (UID: \"f1ff90d9-6a24-416a-a94b-64160dc3d1e2\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-p75qw" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.548844 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lp7jv\" (UniqueName: \"kubernetes.io/projected/709fe194-7202-4841-a79c-1bd440f108d2-kube-api-access-lp7jv\") pod \"multus-admission-controller-857f4d67dd-4r2zd\" (UID: \"709fe194-7202-4841-a79c-1bd440f108d2\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-4r2zd" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.548862 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/43ab850c-2ecf-49b9-b0ff-9f49befb53c3-etcd-service-ca\") pod \"etcd-operator-b45778765-zrtl6\" (UID: \"43ab850c-2ecf-49b9-b0ff-9f49befb53c3\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zrtl6" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.548890 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-frsxz\" (UniqueName: \"kubernetes.io/projected/8f6033c2-7f61-434b-a2c1-e58530ab4196-kube-api-access-frsxz\") pod \"kube-storage-version-migrator-operator-b67b599dd-c2jd4\" (UID: \"8f6033c2-7f61-434b-a2c1-e58530ab4196\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-c2jd4" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.548936 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bpcsw\" (UniqueName: \"kubernetes.io/projected/4b1a43d6-1fb2-48a3-acfe-e656d1dbeff5-kube-api-access-bpcsw\") pod \"csi-hostpathplugin-ppfpq\" (UID: \"4b1a43d6-1fb2-48a3-acfe-e656d1dbeff5\") " pod="hostpath-provisioner/csi-hostpathplugin-ppfpq" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.548969 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7f4d6517-ec6f-44c7-bd21-b74438d2b456-config\") pod \"console-operator-58897d9998-76cfh\" (UID: \"7f4d6517-ec6f-44c7-bd21-b74438d2b456\") " pod="openshift-console-operator/console-operator-58897d9998-76cfh" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.548983 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/1e090228-7348-4896-8ef7-a14137325478-profile-collector-cert\") pod \"catalog-operator-68c6474976-424hr\" (UID: \"1e090228-7348-4896-8ef7-a14137325478\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-424hr" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.549013 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/977887c0-1f95-4b49-ac6e-34d90aa8d305-srv-cert\") pod \"olm-operator-6b444d44fb-7xxhg\" (UID: \"977887c0-1f95-4b49-ac6e-34d90aa8d305\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7xxhg" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.549019 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e00af11a-2662-4b44-9140-5c8d3b5f2834-trusted-ca\") pod \"ingress-operator-5b745b69d9-hqq2x\" (UID: \"e00af11a-2662-4b44-9140-5c8d3b5f2834\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hqq2x" 
Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.549041 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/e00af11a-2662-4b44-9140-5c8d3b5f2834-metrics-tls\") pod \"ingress-operator-5b745b69d9-hqq2x\" (UID: \"e00af11a-2662-4b44-9140-5c8d3b5f2834\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hqq2x" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.549062 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/4b1a43d6-1fb2-48a3-acfe-e656d1dbeff5-mountpoint-dir\") pod \"csi-hostpathplugin-ppfpq\" (UID: \"4b1a43d6-1fb2-48a3-acfe-e656d1dbeff5\") " pod="hostpath-provisioner/csi-hostpathplugin-ppfpq" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.549091 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/985a876e-5d4e-4904-85c6-f10945d269cd-config\") pod \"kube-controller-manager-operator-78b949d7b-q4v9s\" (UID: \"985a876e-5d4e-4904-85c6-f10945d269cd\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-q4v9s" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.549131 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/26d19364-b381-49b6-bf41-9cfe831484f1-tmpfs\") pod \"packageserver-d55dfcdfc-vh9hw\" (UID: \"26d19364-b381-49b6-bf41-9cfe831484f1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vh9hw" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.549169 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8f6033c2-7f61-434b-a2c1-e58530ab4196-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-c2jd4\" (UID: \"8f6033c2-7f61-434b-a2c1-e58530ab4196\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-c2jd4" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.549223 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0cbb9d62-be02-4410-bdc2-c0179576d8ed-service-ca-bundle\") pod \"router-default-5444994796-c6s8n\" (UID: \"0cbb9d62-be02-4410-bdc2-c0179576d8ed\") " pod="openshift-ingress/router-default-5444994796-c6s8n" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.549256 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/0cbb9d62-be02-4410-bdc2-c0179576d8ed-stats-auth\") pod \"router-default-5444994796-c6s8n\" (UID: \"0cbb9d62-be02-4410-bdc2-c0179576d8ed\") " pod="openshift-ingress/router-default-5444994796-c6s8n" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.549253 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/43ab850c-2ecf-49b9-b0ff-9f49befb53c3-config\") pod \"etcd-operator-b45778765-zrtl6\" (UID: \"43ab850c-2ecf-49b9-b0ff-9f49befb53c3\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zrtl6" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.549288 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/40a0ccb9-a83b-4836-8864-5687d054d330-machine-approver-tls\") pod 
\"machine-approver-56656f9798-pq24q\" (UID: \"40a0ccb9-a83b-4836-8864-5687d054d330\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pq24q" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.549320 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/bf109eee-6f22-421a-bcca-c9eda9726830-signing-cabundle\") pod \"service-ca-9c57cc56f-gr77k\" (UID: \"bf109eee-6f22-421a-bcca-c9eda9726830\") " pod="openshift-service-ca/service-ca-9c57cc56f-gr77k" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.549352 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8kc96\" (UniqueName: \"kubernetes.io/projected/7f4d6517-ec6f-44c7-bd21-b74438d2b456-kube-api-access-8kc96\") pod \"console-operator-58897d9998-76cfh\" (UID: \"7f4d6517-ec6f-44c7-bd21-b74438d2b456\") " pod="openshift-console-operator/console-operator-58897d9998-76cfh" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.549393 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f1ff90d9-6a24-416a-a94b-64160dc3d1e2-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-p75qw\" (UID: \"f1ff90d9-6a24-416a-a94b-64160dc3d1e2\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-p75qw" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.549428 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c46lg\" (UniqueName: \"kubernetes.io/projected/e00af11a-2662-4b44-9140-5c8d3b5f2834-kube-api-access-c46lg\") pod \"ingress-operator-5b745b69d9-hqq2x\" (UID: \"e00af11a-2662-4b44-9140-5c8d3b5f2834\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hqq2x" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.549468 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/c63f5e7e-bf41-4bea-bfdb-fca1914f4a5d-metrics-tls\") pod \"dns-default-gd7w5\" (UID: \"c63f5e7e-bf41-4bea-bfdb-fca1914f4a5d\") " pod="openshift-dns/dns-default-gd7w5" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.549503 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/99f17d34-cfff-4706-af23-04fff3d500bd-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-5x4xq\" (UID: \"99f17d34-cfff-4706-af23-04fff3d500bd\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5x4xq" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.549543 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0cbb9d62-be02-4410-bdc2-c0179576d8ed-metrics-certs\") pod \"router-default-5444994796-c6s8n\" (UID: \"0cbb9d62-be02-4410-bdc2-c0179576d8ed\") " pod="openshift-ingress/router-default-5444994796-c6s8n" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.549598 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1ce2658a-0a84-40c6-8e42-e83736811aa1-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-f7jqh\" (UID: \"1ce2658a-0a84-40c6-8e42-e83736811aa1\") " 
pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-f7jqh" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.549630 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1ce2658a-0a84-40c6-8e42-e83736811aa1-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-f7jqh\" (UID: \"1ce2658a-0a84-40c6-8e42-e83736811aa1\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-f7jqh" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.549637 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/26d19364-b381-49b6-bf41-9cfe831484f1-tmpfs\") pod \"packageserver-d55dfcdfc-vh9hw\" (UID: \"26d19364-b381-49b6-bf41-9cfe831484f1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vh9hw" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.549668 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hkfk8\" (UniqueName: \"kubernetes.io/projected/f1ff90d9-6a24-416a-a94b-64160dc3d1e2-kube-api-access-hkfk8\") pod \"machine-config-controller-84d6567774-p75qw\" (UID: \"f1ff90d9-6a24-416a-a94b-64160dc3d1e2\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-p75qw" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.549701 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/26d19364-b381-49b6-bf41-9cfe831484f1-webhook-cert\") pod \"packageserver-d55dfcdfc-vh9hw\" (UID: \"26d19364-b381-49b6-bf41-9cfe831484f1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vh9hw" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.549735 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jrjkq\" (UniqueName: \"kubernetes.io/projected/8c8f3b0b-edad-4a06-8374-2d2d3cd805d8-kube-api-access-jrjkq\") pod \"marketplace-operator-79b997595-2clxj\" (UID: \"8c8f3b0b-edad-4a06-8374-2d2d3cd805d8\") " pod="openshift-marketplace/marketplace-operator-79b997595-2clxj" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.549768 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/282b6a91-2678-4bc8-8577-a5fdcc7e2f9b-serving-cert\") pod \"service-ca-operator-777779d784-8z7kn\" (UID: \"282b6a91-2678-4bc8-8577-a5fdcc7e2f9b\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-8z7kn" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.549827 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/4b1a43d6-1fb2-48a3-acfe-e656d1dbeff5-socket-dir\") pod \"csi-hostpathplugin-ppfpq\" (UID: \"4b1a43d6-1fb2-48a3-acfe-e656d1dbeff5\") " pod="hostpath-provisioner/csi-hostpathplugin-ppfpq" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.549863 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/4b1a43d6-1fb2-48a3-acfe-e656d1dbeff5-csi-data-dir\") pod \"csi-hostpathplugin-ppfpq\" (UID: \"4b1a43d6-1fb2-48a3-acfe-e656d1dbeff5\") " pod="hostpath-provisioner/csi-hostpathplugin-ppfpq" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.549898 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/8d5bba3b-8ec6-4325-80d2-0d3c24b10987-auth-proxy-config\") pod \"machine-config-operator-74547568cd-g6zpg\" (UID: \"8d5bba3b-8ec6-4325-80d2-0d3c24b10987\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-g6zpg" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.549971 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/4b1a43d6-1fb2-48a3-acfe-e656d1dbeff5-socket-dir\") pod \"csi-hostpathplugin-ppfpq\" (UID: \"4b1a43d6-1fb2-48a3-acfe-e656d1dbeff5\") " pod="hostpath-provisioner/csi-hostpathplugin-ppfpq" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.549993 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/4b1a43d6-1fb2-48a3-acfe-e656d1dbeff5-csi-data-dir\") pod \"csi-hostpathplugin-ppfpq\" (UID: \"4b1a43d6-1fb2-48a3-acfe-e656d1dbeff5\") " pod="hostpath-provisioner/csi-hostpathplugin-ppfpq" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.550257 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0cbb9d62-be02-4410-bdc2-c0179576d8ed-service-ca-bundle\") pod \"router-default-5444994796-c6s8n\" (UID: \"0cbb9d62-be02-4410-bdc2-c0179576d8ed\") " pod="openshift-ingress/router-default-5444994796-c6s8n" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.550257 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f1ff90d9-6a24-416a-a94b-64160dc3d1e2-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-p75qw\" (UID: \"f1ff90d9-6a24-416a-a94b-64160dc3d1e2\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-p75qw" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.550838 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/8d5bba3b-8ec6-4325-80d2-0d3c24b10987-auth-proxy-config\") pod \"machine-config-operator-74547568cd-g6zpg\" (UID: \"8d5bba3b-8ec6-4325-80d2-0d3c24b10987\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-g6zpg" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.553076 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/0cbb9d62-be02-4410-bdc2-c0179576d8ed-default-certificate\") pod \"router-default-5444994796-c6s8n\" (UID: \"0cbb9d62-be02-4410-bdc2-c0179576d8ed\") " pod="openshift-ingress/router-default-5444994796-c6s8n" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.553440 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a3b08810-f132-4eae-99fa-5a68c197e52b-metrics-tls\") pod \"dns-operator-744455d44c-lt4gz\" (UID: \"a3b08810-f132-4eae-99fa-5a68c197e52b\") " pod="openshift-dns-operator/dns-operator-744455d44c-lt4gz" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.553535 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/e00af11a-2662-4b44-9140-5c8d3b5f2834-metrics-tls\") pod \"ingress-operator-5b745b69d9-hqq2x\" (UID: \"e00af11a-2662-4b44-9140-5c8d3b5f2834\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hqq2x" Dec 05 11:10:02 crc 
kubenswrapper[4728]: I1205 11:10:02.553947 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0cbb9d62-be02-4410-bdc2-c0179576d8ed-metrics-certs\") pod \"router-default-5444994796-c6s8n\" (UID: \"0cbb9d62-be02-4410-bdc2-c0179576d8ed\") " pod="openshift-ingress/router-default-5444994796-c6s8n" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.554596 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7f4d6517-ec6f-44c7-bd21-b74438d2b456-serving-cert\") pod \"console-operator-58897d9998-76cfh\" (UID: \"7f4d6517-ec6f-44c7-bd21-b74438d2b456\") " pod="openshift-console-operator/console-operator-58897d9998-76cfh" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.555804 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/0cbb9d62-be02-4410-bdc2-c0179576d8ed-stats-auth\") pod \"router-default-5444994796-c6s8n\" (UID: \"0cbb9d62-be02-4410-bdc2-c0179576d8ed\") " pod="openshift-ingress/router-default-5444994796-c6s8n" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.557419 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.561518 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/43ab850c-2ecf-49b9-b0ff-9f49befb53c3-etcd-client\") pod \"etcd-operator-b45778765-zrtl6\" (UID: \"43ab850c-2ecf-49b9-b0ff-9f49befb53c3\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zrtl6" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.577431 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.582918 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/43ab850c-2ecf-49b9-b0ff-9f49befb53c3-serving-cert\") pod \"etcd-operator-b45778765-zrtl6\" (UID: \"43ab850c-2ecf-49b9-b0ff-9f49befb53c3\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zrtl6" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.597209 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.602642 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/985a876e-5d4e-4904-85c6-f10945d269cd-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-q4v9s\" (UID: \"985a876e-5d4e-4904-85c6-f10945d269cd\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-q4v9s" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.618055 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.620387 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/985a876e-5d4e-4904-85c6-f10945d269cd-config\") pod \"kube-controller-manager-operator-78b949d7b-q4v9s\" (UID: \"985a876e-5d4e-4904-85c6-f10945d269cd\") " 
pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-q4v9s" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.637943 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.638709 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/43ab850c-2ecf-49b9-b0ff-9f49befb53c3-etcd-ca\") pod \"etcd-operator-b45778765-zrtl6\" (UID: \"43ab850c-2ecf-49b9-b0ff-9f49befb53c3\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zrtl6" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.650624 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:10:02 crc kubenswrapper[4728]: E1205 11:10:02.651098 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:03.151076985 +0000 UTC m=+137.293199688 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.652024 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:02 crc kubenswrapper[4728]: E1205 11:10:02.652424 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:03.152402746 +0000 UTC m=+137.294525479 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.659129 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.679251 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.683542 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1ce2658a-0a84-40c6-8e42-e83736811aa1-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-f7jqh\" (UID: \"1ce2658a-0a84-40c6-8e42-e83736811aa1\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-f7jqh" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.698055 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.717816 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.721265 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1ce2658a-0a84-40c6-8e42-e83736811aa1-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-f7jqh\" (UID: \"1ce2658a-0a84-40c6-8e42-e83736811aa1\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-f7jqh" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.738377 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.753624 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:10:02 crc kubenswrapper[4728]: E1205 11:10:02.753757 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:03.253736473 +0000 UTC m=+137.395859166 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.754307 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:02 crc kubenswrapper[4728]: E1205 11:10:02.754718 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:03.254700412 +0000 UTC m=+137.396823145 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.758609 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.778629 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.798101 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.799533 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/40a0ccb9-a83b-4836-8864-5687d054d330-auth-proxy-config\") pod \"machine-approver-56656f9798-pq24q\" (UID: \"40a0ccb9-a83b-4836-8864-5687d054d330\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pq24q" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.818265 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.837954 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.844245 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/40a0ccb9-a83b-4836-8864-5687d054d330-machine-approver-tls\") pod \"machine-approver-56656f9798-pq24q\" (UID: \"40a0ccb9-a83b-4836-8864-5687d054d330\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pq24q" Dec 05 11:10:02 crc 
kubenswrapper[4728]: I1205 11:10:02.857571 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.857678 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 05 11:10:02 crc kubenswrapper[4728]: E1205 11:10:02.858518 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:03.358478904 +0000 UTC m=+137.500601777 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.859458 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:02 crc kubenswrapper[4728]: E1205 11:10:02.860048 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:03.360028832 +0000 UTC m=+137.502151565 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.863167 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8f6033c2-7f61-434b-a2c1-e58530ab4196-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-c2jd4\" (UID: \"8f6033c2-7f61-434b-a2c1-e58530ab4196\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-c2jd4" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.878128 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.897943 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.900654 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8f6033c2-7f61-434b-a2c1-e58530ab4196-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-c2jd4\" (UID: \"8f6033c2-7f61-434b-a2c1-e58530ab4196\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-c2jd4" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.918456 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.938021 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.958010 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.959132 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/40a0ccb9-a83b-4836-8864-5687d054d330-config\") pod \"machine-approver-56656f9798-pq24q\" (UID: \"40a0ccb9-a83b-4836-8864-5687d054d330\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pq24q" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.960949 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:10:02 crc kubenswrapper[4728]: E1205 11:10:02.962022 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-05 11:10:03.462006338 +0000 UTC m=+137.604129021 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.976024 4728 request.go:700] Waited for 1.005601731s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-api/secrets?fieldSelector=metadata.name%3Dcontrol-plane-machine-set-operator-tls&limit=500&resourceVersion=0 Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.977824 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.984337 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/99f17d34-cfff-4706-af23-04fff3d500bd-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-5x4xq\" (UID: \"99f17d34-cfff-4706-af23-04fff3d500bd\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5x4xq" Dec 05 11:10:02 crc kubenswrapper[4728]: I1205 11:10:02.997691 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.018032 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.037984 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.058303 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.063975 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:03 crc kubenswrapper[4728]: E1205 11:10:03.064604 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:03.564572203 +0000 UTC m=+137.706694926 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.078042 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.097123 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.098156 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/8d5bba3b-8ec6-4325-80d2-0d3c24b10987-images\") pod \"machine-config-operator-74547568cd-g6zpg\" (UID: \"8d5bba3b-8ec6-4325-80d2-0d3c24b10987\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-g6zpg" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.117329 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.123695 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/8d5bba3b-8ec6-4325-80d2-0d3c24b10987-proxy-tls\") pod \"machine-config-operator-74547568cd-g6zpg\" (UID: \"8d5bba3b-8ec6-4325-80d2-0d3c24b10987\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-g6zpg" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.138596 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.141631 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/f1ff90d9-6a24-416a-a94b-64160dc3d1e2-proxy-tls\") pod \"machine-config-controller-84d6567774-p75qw\" (UID: \"f1ff90d9-6a24-416a-a94b-64160dc3d1e2\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-p75qw" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.158056 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.165126 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:10:03 crc kubenswrapper[4728]: E1205 11:10:03.165849 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:03.665815077 +0000 UTC m=+137.807937770 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.178112 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.185183 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/1e090228-7348-4896-8ef7-a14137325478-srv-cert\") pod \"catalog-operator-68c6474976-424hr\" (UID: \"1e090228-7348-4896-8ef7-a14137325478\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-424hr" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.198702 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.217715 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.238007 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.260299 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.267307 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:03 crc kubenswrapper[4728]: E1205 11:10:03.267721 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:03.767708241 +0000 UTC m=+137.909830934 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.273898 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/1e090228-7348-4896-8ef7-a14137325478-profile-collector-cert\") pod \"catalog-operator-68c6474976-424hr\" (UID: \"1e090228-7348-4896-8ef7-a14137325478\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-424hr" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.274039 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/977887c0-1f95-4b49-ac6e-34d90aa8d305-profile-collector-cert\") pod \"olm-operator-6b444d44fb-7xxhg\" (UID: \"977887c0-1f95-4b49-ac6e-34d90aa8d305\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7xxhg" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.274400 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/86cfa1e7-7206-404d-bc2d-bb34f50980ef-secret-volume\") pod \"collect-profiles-29415540-x94v5\" (UID: \"86cfa1e7-7206-404d-bc2d-bb34f50980ef\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415540-x94v5" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.278224 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.297480 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.317539 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.324254 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/282b6a91-2678-4bc8-8577-a5fdcc7e2f9b-serving-cert\") pod \"service-ca-operator-777779d784-8z7kn\" (UID: \"282b6a91-2678-4bc8-8577-a5fdcc7e2f9b\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-8z7kn" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.337773 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.351772 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2dq9w" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.351780 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.358240 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.368502 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:10:03 crc kubenswrapper[4728]: E1205 11:10:03.368701 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:03.868670076 +0000 UTC m=+138.010792779 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.369051 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:03 crc kubenswrapper[4728]: E1205 11:10:03.369467 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:03.86945566 +0000 UTC m=+138.011578563 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.369670 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/282b6a91-2678-4bc8-8577-a5fdcc7e2f9b-config\") pod \"service-ca-operator-777779d784-8z7kn\" (UID: \"282b6a91-2678-4bc8-8577-a5fdcc7e2f9b\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-8z7kn" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.377589 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.383088 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/51182eb7-b5c6-4108-95cb-c7835d473ae1-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-7fhtn\" (UID: \"51182eb7-b5c6-4108-95cb-c7835d473ae1\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7fhtn" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.397774 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.411436 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/8c8f3b0b-edad-4a06-8374-2d2d3cd805d8-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-2clxj\" (UID: \"8c8f3b0b-edad-4a06-8374-2d2d3cd805d8\") " pod="openshift-marketplace/marketplace-operator-79b997595-2clxj" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.423029 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.429989 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8c8f3b0b-edad-4a06-8374-2d2d3cd805d8-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-2clxj\" (UID: \"8c8f3b0b-edad-4a06-8374-2d2d3cd805d8\") " pod="openshift-marketplace/marketplace-operator-79b997595-2clxj" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.437772 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.458156 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.470711 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:10:03 crc 
kubenswrapper[4728]: E1205 11:10:03.470916 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:03.97089494 +0000 UTC m=+138.113017633 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.471407 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:03 crc kubenswrapper[4728]: E1205 11:10:03.471963 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:03.971923442 +0000 UTC m=+138.114046135 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.476837 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.497687 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.511441 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/709fe194-7202-4841-a79c-1bd440f108d2-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-4r2zd\" (UID: \"709fe194-7202-4841-a79c-1bd440f108d2\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-4r2zd" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.518148 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.538031 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.543684 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/26d19364-b381-49b6-bf41-9cfe831484f1-apiservice-cert\") pod \"packageserver-d55dfcdfc-vh9hw\" (UID: 
\"26d19364-b381-49b6-bf41-9cfe831484f1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vh9hw" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.544323 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/26d19364-b381-49b6-bf41-9cfe831484f1-webhook-cert\") pod \"packageserver-d55dfcdfc-vh9hw\" (UID: \"26d19364-b381-49b6-bf41-9cfe831484f1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vh9hw" Dec 05 11:10:03 crc kubenswrapper[4728]: E1205 11:10:03.547604 4728 configmap.go:193] Couldn't get configMap openshift-operator-lifecycle-manager/collect-profiles-config: failed to sync configmap cache: timed out waiting for the condition Dec 05 11:10:03 crc kubenswrapper[4728]: E1205 11:10:03.547718 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/86cfa1e7-7206-404d-bc2d-bb34f50980ef-config-volume podName:86cfa1e7-7206-404d-bc2d-bb34f50980ef nodeName:}" failed. No retries permitted until 2025-12-05 11:10:04.047702945 +0000 UTC m=+138.189825638 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-volume" (UniqueName: "kubernetes.io/configmap/86cfa1e7-7206-404d-bc2d-bb34f50980ef-config-volume") pod "collect-profiles-29415540-x94v5" (UID: "86cfa1e7-7206-404d-bc2d-bb34f50980ef") : failed to sync configmap cache: timed out waiting for the condition Dec 05 11:10:03 crc kubenswrapper[4728]: E1205 11:10:03.547884 4728 secret.go:188] Couldn't get secret openshift-service-ca/signing-key: failed to sync secret cache: timed out waiting for the condition Dec 05 11:10:03 crc kubenswrapper[4728]: E1205 11:10:03.548052 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bf109eee-6f22-421a-bcca-c9eda9726830-signing-key podName:bf109eee-6f22-421a-bcca-c9eda9726830 nodeName:}" failed. No retries permitted until 2025-12-05 11:10:04.048039365 +0000 UTC m=+138.190162058 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "signing-key" (UniqueName: "kubernetes.io/secret/bf109eee-6f22-421a-bcca-c9eda9726830-signing-key") pod "service-ca-9c57cc56f-gr77k" (UID: "bf109eee-6f22-421a-bcca-c9eda9726830") : failed to sync secret cache: timed out waiting for the condition Dec 05 11:10:03 crc kubenswrapper[4728]: E1205 11:10:03.548926 4728 configmap.go:193] Couldn't get configMap openshift-dns/dns-default: failed to sync configmap cache: timed out waiting for the condition Dec 05 11:10:03 crc kubenswrapper[4728]: E1205 11:10:03.548933 4728 secret.go:188] Couldn't get secret openshift-machine-config-operator/machine-config-server-tls: failed to sync secret cache: timed out waiting for the condition Dec 05 11:10:03 crc kubenswrapper[4728]: E1205 11:10:03.549056 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e0939ff9-1920-4663-84d4-4cef2e8e3588-certs podName:e0939ff9-1920-4663-84d4-4cef2e8e3588 nodeName:}" failed. No retries permitted until 2025-12-05 11:10:04.049030606 +0000 UTC m=+138.191153339 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "certs" (UniqueName: "kubernetes.io/secret/e0939ff9-1920-4663-84d4-4cef2e8e3588-certs") pod "machine-config-server-jr65f" (UID: "e0939ff9-1920-4663-84d4-4cef2e8e3588") : failed to sync secret cache: timed out waiting for the condition Dec 05 11:10:03 crc kubenswrapper[4728]: E1205 11:10:03.549104 4728 secret.go:188] Couldn't get secret openshift-machine-config-operator/node-bootstrapper-token: failed to sync secret cache: timed out waiting for the condition Dec 05 11:10:03 crc kubenswrapper[4728]: E1205 11:10:03.549148 4728 secret.go:188] Couldn't get secret openshift-ingress-canary/canary-serving-cert: failed to sync secret cache: timed out waiting for the condition Dec 05 11:10:03 crc kubenswrapper[4728]: E1205 11:10:03.549192 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/c63f5e7e-bf41-4bea-bfdb-fca1914f4a5d-config-volume podName:c63f5e7e-bf41-4bea-bfdb-fca1914f4a5d nodeName:}" failed. No retries permitted until 2025-12-05 11:10:04.049081737 +0000 UTC m=+138.191204540 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-volume" (UniqueName: "kubernetes.io/configmap/c63f5e7e-bf41-4bea-bfdb-fca1914f4a5d-config-volume") pod "dns-default-gd7w5" (UID: "c63f5e7e-bf41-4bea-bfdb-fca1914f4a5d") : failed to sync configmap cache: timed out waiting for the condition Dec 05 11:10:03 crc kubenswrapper[4728]: E1205 11:10:03.549279 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e0939ff9-1920-4663-84d4-4cef2e8e3588-node-bootstrap-token podName:e0939ff9-1920-4663-84d4-4cef2e8e3588 nodeName:}" failed. No retries permitted until 2025-12-05 11:10:04.049214862 +0000 UTC m=+138.191337745 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "node-bootstrap-token" (UniqueName: "kubernetes.io/secret/e0939ff9-1920-4663-84d4-4cef2e8e3588-node-bootstrap-token") pod "machine-config-server-jr65f" (UID: "e0939ff9-1920-4663-84d4-4cef2e8e3588") : failed to sync secret cache: timed out waiting for the condition Dec 05 11:10:03 crc kubenswrapper[4728]: E1205 11:10:03.549306 4728 secret.go:188] Couldn't get secret openshift-operator-lifecycle-manager/olm-operator-serving-cert: failed to sync secret cache: timed out waiting for the condition Dec 05 11:10:03 crc kubenswrapper[4728]: E1205 11:10:03.549317 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/af6fc955-4b84-4c02-a7b0-a3272f9fbf61-cert podName:af6fc955-4b84-4c02-a7b0-a3272f9fbf61 nodeName:}" failed. No retries permitted until 2025-12-05 11:10:04.049300434 +0000 UTC m=+138.191423337 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/af6fc955-4b84-4c02-a7b0-a3272f9fbf61-cert") pod "ingress-canary-kwdb8" (UID: "af6fc955-4b84-4c02-a7b0-a3272f9fbf61") : failed to sync secret cache: timed out waiting for the condition Dec 05 11:10:03 crc kubenswrapper[4728]: E1205 11:10:03.549365 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/977887c0-1f95-4b49-ac6e-34d90aa8d305-srv-cert podName:977887c0-1f95-4b49-ac6e-34d90aa8d305 nodeName:}" failed. No retries permitted until 2025-12-05 11:10:04.049344615 +0000 UTC m=+138.191467338 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "srv-cert" (UniqueName: "kubernetes.io/secret/977887c0-1f95-4b49-ac6e-34d90aa8d305-srv-cert") pod "olm-operator-6b444d44fb-7xxhg" (UID: "977887c0-1f95-4b49-ac6e-34d90aa8d305") : failed to sync secret cache: timed out waiting for the condition Dec 05 11:10:03 crc kubenswrapper[4728]: E1205 11:10:03.550410 4728 secret.go:188] Couldn't get secret openshift-dns/dns-default-metrics-tls: failed to sync secret cache: timed out waiting for the condition Dec 05 11:10:03 crc kubenswrapper[4728]: E1205 11:10:03.550454 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c63f5e7e-bf41-4bea-bfdb-fca1914f4a5d-metrics-tls podName:c63f5e7e-bf41-4bea-bfdb-fca1914f4a5d nodeName:}" failed. No retries permitted until 2025-12-05 11:10:04.050444489 +0000 UTC m=+138.192567372 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-tls" (UniqueName: "kubernetes.io/secret/c63f5e7e-bf41-4bea-bfdb-fca1914f4a5d-metrics-tls") pod "dns-default-gd7w5" (UID: "c63f5e7e-bf41-4bea-bfdb-fca1914f4a5d") : failed to sync secret cache: timed out waiting for the condition Dec 05 11:10:03 crc kubenswrapper[4728]: E1205 11:10:03.550470 4728 configmap.go:193] Couldn't get configMap openshift-service-ca/signing-cabundle: failed to sync configmap cache: timed out waiting for the condition Dec 05 11:10:03 crc kubenswrapper[4728]: E1205 11:10:03.550595 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/bf109eee-6f22-421a-bcca-c9eda9726830-signing-cabundle podName:bf109eee-6f22-421a-bcca-c9eda9726830 nodeName:}" failed. No retries permitted until 2025-12-05 11:10:04.050540812 +0000 UTC m=+138.192663705 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "signing-cabundle" (UniqueName: "kubernetes.io/configmap/bf109eee-6f22-421a-bcca-c9eda9726830-signing-cabundle") pod "service-ca-9c57cc56f-gr77k" (UID: "bf109eee-6f22-421a-bcca-c9eda9726830") : failed to sync configmap cache: timed out waiting for the condition Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.558393 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.572820 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:10:03 crc kubenswrapper[4728]: E1205 11:10:03.573147 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:04.073080673 +0000 UTC m=+138.215203406 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.573522 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:03 crc kubenswrapper[4728]: E1205 11:10:03.573982 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:04.07396778 +0000 UTC m=+138.216090683 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.577572 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.598126 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.618031 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.638222 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.657946 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.675900 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:10:03 crc kubenswrapper[4728]: E1205 11:10:03.676596 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:04.176572056 +0000 UTC m=+138.318694759 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.677328 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:03 crc kubenswrapper[4728]: E1205 11:10:03.678037 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:04.178023571 +0000 UTC m=+138.320146284 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.683243 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.697663 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.718760 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.738405 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.757597 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.779091 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:10:03 crc kubenswrapper[4728]: E1205 11:10:03.779259 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:04.279238864 +0000 UTC m=+138.421361557 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.779493 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:03 crc kubenswrapper[4728]: E1205 11:10:03.779963 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:04.279951936 +0000 UTC m=+138.422074849 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.792373 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qnlhl\" (UniqueName: \"kubernetes.io/projected/132479f7-af34-420b-821f-34c11e07b06e-kube-api-access-qnlhl\") pod \"apiserver-76f77b778f-pdjw2\" (UID: \"132479f7-af34-420b-821f-34c11e07b06e\") " pod="openshift-apiserver/apiserver-76f77b778f-pdjw2" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.823403 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vr92b\" (UniqueName: \"kubernetes.io/projected/1c8c9c9a-5889-46cd-a366-122310015aa3-kube-api-access-vr92b\") pod \"openshift-controller-manager-operator-756b6f6bc6-8b7w5\" (UID: \"1c8c9c9a-5889-46cd-a366-122310015aa3\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8b7w5" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.833516 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f35e95aa-4acc-4b87-a673-1e22826ebe22-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-jpc9p\" (UID: \"f35e95aa-4acc-4b87-a673-1e22826ebe22\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jpc9p" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.854963 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2dw9n\" (UniqueName: \"kubernetes.io/projected/991dbe07-304e-4bd9-9aca-2b29134cc869-kube-api-access-2dw9n\") pod \"route-controller-manager-6576b87f9c-qnmf2\" (UID: \"991dbe07-304e-4bd9-9aca-2b29134cc869\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qnmf2" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.878136 4728 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qjwvx\" (UniqueName: \"kubernetes.io/projected/b21b7f08-1b74-4ed3-8a78-f6a03b514069-kube-api-access-qjwvx\") pod \"downloads-7954f5f757-lgb9x\" (UID: \"b21b7f08-1b74-4ed3-8a78-f6a03b514069\") " pod="openshift-console/downloads-7954f5f757-lgb9x" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.881003 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:10:03 crc kubenswrapper[4728]: E1205 11:10:03.881309 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:04.381274282 +0000 UTC m=+138.523396995 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.903011 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bndf4\" (UniqueName: \"kubernetes.io/projected/6c786bc2-e4d2-4402-a944-d21132d6087b-kube-api-access-bndf4\") pod \"apiserver-7bbb656c7d-8d796\" (UID: \"6c786bc2-e4d2-4402-a944-d21132d6087b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8d796" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.921284 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2s6ck\" (UniqueName: \"kubernetes.io/projected/9ead179c-fe6a-47fd-a4b8-6af96faff785-kube-api-access-2s6ck\") pod \"openshift-apiserver-operator-796bbdcf4f-tzm4v\" (UID: \"9ead179c-fe6a-47fd-a4b8-6af96faff785\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tzm4v" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.931357 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-pdjw2" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.941020 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bwkfn\" (UniqueName: \"kubernetes.io/projected/e4952f54-bca0-4cae-b32a-f9d8cb0bb91c-kube-api-access-bwkfn\") pod \"cluster-samples-operator-665b6dd947-vrvkz\" (UID: \"e4952f54-bca0-4cae-b32a-f9d8cb0bb91c\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vrvkz" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.953998 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d56hs\" (UniqueName: \"kubernetes.io/projected/049f4ab0-4d3e-45ab-b390-e4c80a919880-kube-api-access-d56hs\") pod \"controller-manager-879f6c89f-52pf7\" (UID: \"049f4ab0-4d3e-45ab-b390-e4c80a919880\") " pod="openshift-controller-manager/controller-manager-879f6c89f-52pf7" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.975937 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tlnhg\" (UniqueName: \"kubernetes.io/projected/d92fe6c3-10f5-4151-86cb-236a4c79463b-kube-api-access-tlnhg\") pod \"oauth-openshift-558db77b4-x9m7l\" (UID: \"d92fe6c3-10f5-4151-86cb-236a4c79463b\") " pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.983078 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:03 crc kubenswrapper[4728]: E1205 11:10:03.983483 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:04.483466695 +0000 UTC m=+138.625589388 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.990171 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qnmf2" Dec 05 11:10:03 crc kubenswrapper[4728]: I1205 11:10:03.996152 4728 request.go:700] Waited for 1.944837626s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/serviceaccounts/cluster-image-registry-operator/token Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:03.999968 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s778f\" (UniqueName: \"kubernetes.io/projected/fda4dac4-0200-4740-a9c1-c3897809c2c0-kube-api-access-s778f\") pod \"console-f9d7485db-f6bjc\" (UID: \"fda4dac4-0200-4740-a9c1-c3897809c2c0\") " pod="openshift-console/console-f9d7485db-f6bjc" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.016868 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vqk4x\" (UniqueName: \"kubernetes.io/projected/f35e95aa-4acc-4b87-a673-1e22826ebe22-kube-api-access-vqk4x\") pod \"cluster-image-registry-operator-dc59b4c8b-jpc9p\" (UID: \"f35e95aa-4acc-4b87-a673-1e22826ebe22\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jpc9p" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.050119 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-52pf7" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.050193 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.051379 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8d796" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.054208 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d65vg\" (UniqueName: \"kubernetes.io/projected/5e51975c-42dd-458c-a52c-2f8cf11810cf-kube-api-access-d65vg\") pod \"authentication-operator-69f744f599-qr78k\" (UID: \"5e51975c-42dd-458c-a52c-2f8cf11810cf\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-qr78k" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.059827 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-lgb9x" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.085644 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7nk7v\" (UniqueName: \"kubernetes.io/projected/4c3990f9-726a-43a5-a84b-eea529806652-kube-api-access-7nk7v\") pod \"openshift-config-operator-7777fb866f-8wd9p\" (UID: \"4c3990f9-726a-43a5-a84b-eea529806652\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8wd9p" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.086332 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vrvkz" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.086568 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.086705 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/c63f5e7e-bf41-4bea-bfdb-fca1914f4a5d-metrics-tls\") pod \"dns-default-gd7w5\" (UID: \"c63f5e7e-bf41-4bea-bfdb-fca1914f4a5d\") " pod="openshift-dns/dns-default-gd7w5" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.087048 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tzm4v" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.087836 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 05 11:10:04 crc kubenswrapper[4728]: E1205 11:10:04.088438 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:04.586777682 +0000 UTC m=+138.728900385 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.088483 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/86cfa1e7-7206-404d-bc2d-bb34f50980ef-config-volume\") pod \"collect-profiles-29415540-x94v5\" (UID: \"86cfa1e7-7206-404d-bc2d-bb34f50980ef\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415540-x94v5" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.088609 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/bf109eee-6f22-421a-bcca-c9eda9726830-signing-key\") pod \"service-ca-9c57cc56f-gr77k\" (UID: \"bf109eee-6f22-421a-bcca-c9eda9726830\") " pod="openshift-service-ca/service-ca-9c57cc56f-gr77k" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.088683 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.088730 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" 
(UniqueName: \"kubernetes.io/secret/e0939ff9-1920-4663-84d4-4cef2e8e3588-certs\") pod \"machine-config-server-jr65f\" (UID: \"e0939ff9-1920-4663-84d4-4cef2e8e3588\") " pod="openshift-machine-config-operator/machine-config-server-jr65f" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.088768 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c63f5e7e-bf41-4bea-bfdb-fca1914f4a5d-config-volume\") pod \"dns-default-gd7w5\" (UID: \"c63f5e7e-bf41-4bea-bfdb-fca1914f4a5d\") " pod="openshift-dns/dns-default-gd7w5" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.088824 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/e0939ff9-1920-4663-84d4-4cef2e8e3588-node-bootstrap-token\") pod \"machine-config-server-jr65f\" (UID: \"e0939ff9-1920-4663-84d4-4cef2e8e3588\") " pod="openshift-machine-config-operator/machine-config-server-jr65f" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.088899 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/af6fc955-4b84-4c02-a7b0-a3272f9fbf61-cert\") pod \"ingress-canary-kwdb8\" (UID: \"af6fc955-4b84-4c02-a7b0-a3272f9fbf61\") " pod="openshift-ingress-canary/ingress-canary-kwdb8" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.089014 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/977887c0-1f95-4b49-ac6e-34d90aa8d305-srv-cert\") pod \"olm-operator-6b444d44fb-7xxhg\" (UID: \"977887c0-1f95-4b49-ac6e-34d90aa8d305\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7xxhg" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.089072 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/bf109eee-6f22-421a-bcca-c9eda9726830-signing-cabundle\") pod \"service-ca-9c57cc56f-gr77k\" (UID: \"bf109eee-6f22-421a-bcca-c9eda9726830\") " pod="openshift-service-ca/service-ca-9c57cc56f-gr77k" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.090529 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/86cfa1e7-7206-404d-bc2d-bb34f50980ef-config-volume\") pod \"collect-profiles-29415540-x94v5\" (UID: \"86cfa1e7-7206-404d-bc2d-bb34f50980ef\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415540-x94v5" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.092538 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/bf109eee-6f22-421a-bcca-c9eda9726830-signing-cabundle\") pod \"service-ca-9c57cc56f-gr77k\" (UID: \"bf109eee-6f22-421a-bcca-c9eda9726830\") " pod="openshift-service-ca/service-ca-9c57cc56f-gr77k" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.093322 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c63f5e7e-bf41-4bea-bfdb-fca1914f4a5d-config-volume\") pod \"dns-default-gd7w5\" (UID: \"c63f5e7e-bf41-4bea-bfdb-fca1914f4a5d\") " pod="openshift-dns/dns-default-gd7w5" Dec 05 11:10:04 crc kubenswrapper[4728]: E1205 11:10:04.093655 4728 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:04.593635472 +0000 UTC m=+138.735758185 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.094257 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-qr78k" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.096262 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/bf109eee-6f22-421a-bcca-c9eda9726830-signing-key\") pod \"service-ca-9c57cc56f-gr77k\" (UID: \"bf109eee-6f22-421a-bcca-c9eda9726830\") " pod="openshift-service-ca/service-ca-9c57cc56f-gr77k" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.096338 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/977887c0-1f95-4b49-ac6e-34d90aa8d305-srv-cert\") pod \"olm-operator-6b444d44fb-7xxhg\" (UID: \"977887c0-1f95-4b49-ac6e-34d90aa8d305\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7xxhg" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.099229 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.099686 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/c63f5e7e-bf41-4bea-bfdb-fca1914f4a5d-metrics-tls\") pod \"dns-default-gd7w5\" (UID: \"c63f5e7e-bf41-4bea-bfdb-fca1914f4a5d\") " pod="openshift-dns/dns-default-gd7w5" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.114332 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8wd9p" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.114551 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8b7w5" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.119964 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.122056 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-f6bjc" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.124278 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/e0939ff9-1920-4663-84d4-4cef2e8e3588-node-bootstrap-token\") pod \"machine-config-server-jr65f\" (UID: \"e0939ff9-1920-4663-84d4-4cef2e8e3588\") " pod="openshift-machine-config-operator/machine-config-server-jr65f" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.133284 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jpc9p" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.135869 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/e0939ff9-1920-4663-84d4-4cef2e8e3588-certs\") pod \"machine-config-server-jr65f\" (UID: \"e0939ff9-1920-4663-84d4-4cef2e8e3588\") " pod="openshift-machine-config-operator/machine-config-server-jr65f" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.137852 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.145008 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-pdjw2"] Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.157381 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.179112 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.191002 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:10:04 crc kubenswrapper[4728]: E1205 11:10:04.191529 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:04.691515833 +0000 UTC m=+138.833638526 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.195776 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/af6fc955-4b84-4c02-a7b0-a3272f9fbf61-cert\") pod \"ingress-canary-kwdb8\" (UID: \"af6fc955-4b84-4c02-a7b0-a3272f9fbf61\") " pod="openshift-ingress-canary/ingress-canary-kwdb8" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.201352 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.296610 4728 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.304090 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:04 crc kubenswrapper[4728]: E1205 11:10:04.306445 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:04.806429106 +0000 UTC m=+138.948551799 (durationBeforeRetry 500ms). 
Dec 05 11:10:04 crc kubenswrapper[4728]: E1205 11:10:04.306445 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:04.806429106 +0000 UTC m=+138.948551799 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.308003 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt"
Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.308463 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt"
Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.308941 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt"
Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.311713 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt"
Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.324127 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-qnmf2"]
Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.325013 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-pdjw2" event={"ID":"132479f7-af34-420b-821f-34c11e07b06e","Type":"ContainerStarted","Data":"e71357933ff7994634169db8a2460f857eaaf026b388599c7a83051248276f59"}
Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.354503 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/aec2314f-9290-4eb4-a632-70baf826e29a-bound-sa-token\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8"
Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.355914 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bwlsw\" (UniqueName: \"kubernetes.io/projected/aec2314f-9290-4eb4-a632-70baf826e29a-kube-api-access-bwlsw\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8"
Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.371293 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ac5d7238-f656-4446-9620-49a1ea4f677f-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-nvfz5\" (UID: \"ac5d7238-f656-4446-9620-49a1ea4f677f\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nvfz5"
Dec 05 11:10:04 crc kubenswrapper[4728]: W1205 11:10:04.376982 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod991dbe07_304e_4bd9_9aca_2b29134cc869.slice/crio-d0fafe5d47a1c4246fd179f18229a59670855b13d92708ec1358e21975026ce2 WatchSource:0}: Error finding container d0fafe5d47a1c4246fd179f18229a59670855b13d92708ec1358e21975026ce2: Status 404 returned error can't find the container with id d0fafe5d47a1c4246fd179f18229a59670855b13d92708ec1358e21975026ce2
Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.392408 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dzkgc\" (UniqueName: \"kubernetes.io/projected/f4d671a2-6454-4bf6-a099-0c0e15de2f20-kube-api-access-dzkgc\") pod \"machine-api-operator-5694c8668f-vllpv\" (UID: \"f4d671a2-6454-4bf6-a099-0c0e15de2f20\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vllpv"
Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.405353 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 11:10:04 crc kubenswrapper[4728]: E1205 11:10:04.407110 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:04.907084072 +0000 UTC m=+139.049206765 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.421367 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4wdx5\" (UniqueName: \"kubernetes.io/projected/282b6a91-2678-4bc8-8577-a5fdcc7e2f9b-kube-api-access-4wdx5\") pod \"service-ca-operator-777779d784-8z7kn\" (UID: \"282b6a91-2678-4bc8-8577-a5fdcc7e2f9b\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-8z7kn"
Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.437826 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1ce2658a-0a84-40c6-8e42-e83736811aa1-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-f7jqh\" (UID: \"1ce2658a-0a84-40c6-8e42-e83736811aa1\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-f7jqh"
Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.456765 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g5d5g\" (UniqueName: \"kubernetes.io/projected/86cfa1e7-7206-404d-bc2d-bb34f50980ef-kube-api-access-g5d5g\") pod \"collect-profiles-29415540-x94v5\" (UID: \"86cfa1e7-7206-404d-bc2d-bb34f50980ef\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415540-x94v5"
Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.487371 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-br7g9\" (UniqueName: \"kubernetes.io/projected/99f17d34-cfff-4706-af23-04fff3d500bd-kube-api-access-br7g9\") pod \"control-plane-machine-set-operator-78cbb6b69f-5x4xq\" (UID: \"99f17d34-cfff-4706-af23-04fff3d500bd\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5x4xq"
\"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.507010 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vffhd\" (UniqueName: \"kubernetes.io/projected/1e090228-7348-4896-8ef7-a14137325478-kube-api-access-vffhd\") pod \"catalog-operator-68c6474976-424hr\" (UID: \"1e090228-7348-4896-8ef7-a14137325478\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-424hr" Dec 05 11:10:04 crc kubenswrapper[4728]: E1205 11:10:04.507164 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:05.00714753 +0000 UTC m=+139.149270413 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.512131 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5qx9n\" (UniqueName: \"kubernetes.io/projected/977887c0-1f95-4b49-ac6e-34d90aa8d305-kube-api-access-5qx9n\") pod \"olm-operator-6b444d44fb-7xxhg\" (UID: \"977887c0-1f95-4b49-ac6e-34d90aa8d305\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7xxhg" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.515922 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nvfz5" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.534623 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/985a876e-5d4e-4904-85c6-f10945d269cd-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-q4v9s\" (UID: \"985a876e-5d4e-4904-85c6-f10945d269cd\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-q4v9s" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.553256 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hxnfq\" (UniqueName: \"kubernetes.io/projected/40a0ccb9-a83b-4836-8864-5687d054d330-kube-api-access-hxnfq\") pod \"machine-approver-56656f9798-pq24q\" (UID: \"40a0ccb9-a83b-4836-8864-5687d054d330\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pq24q" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.562674 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-q4v9s" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.569615 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-f7jqh" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.579061 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-vllpv" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.579135 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pq24q" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.590847 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f7r2s\" (UniqueName: \"kubernetes.io/projected/af6fc955-4b84-4c02-a7b0-a3272f9fbf61-kube-api-access-f7r2s\") pod \"ingress-canary-kwdb8\" (UID: \"af6fc955-4b84-4c02-a7b0-a3272f9fbf61\") " pod="openshift-ingress-canary/ingress-canary-kwdb8" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.595085 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5x4xq" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.608340 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:10:04 crc kubenswrapper[4728]: E1205 11:10:04.609433 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:05.109402455 +0000 UTC m=+139.251525148 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.620576 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-424hr" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.627453 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-8z7kn" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.627983 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rgqtf\" (UniqueName: \"kubernetes.io/projected/a3b08810-f132-4eae-99fa-5a68c197e52b-kube-api-access-rgqtf\") pod \"dns-operator-744455d44c-lt4gz\" (UID: \"a3b08810-f132-4eae-99fa-5a68c197e52b\") " pod="openshift-dns-operator/dns-operator-744455d44c-lt4gz" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.636125 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fj8ph\" (UniqueName: \"kubernetes.io/projected/0cbb9d62-be02-4410-bdc2-c0179576d8ed-kube-api-access-fj8ph\") pod \"router-default-5444994796-c6s8n\" (UID: \"0cbb9d62-be02-4410-bdc2-c0179576d8ed\") " pod="openshift-ingress/router-default-5444994796-c6s8n" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.650946 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hz44b\" (UniqueName: \"kubernetes.io/projected/c63f5e7e-bf41-4bea-bfdb-fca1914f4a5d-kube-api-access-hz44b\") pod \"dns-default-gd7w5\" (UID: \"c63f5e7e-bf41-4bea-bfdb-fca1914f4a5d\") " pod="openshift-dns/dns-default-gd7w5" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.671019 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lp7jv\" (UniqueName: \"kubernetes.io/projected/709fe194-7202-4841-a79c-1bd440f108d2-kube-api-access-lp7jv\") pod \"multus-admission-controller-857f4d67dd-4r2zd\" (UID: \"709fe194-7202-4841-a79c-1bd440f108d2\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-4r2zd" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.674276 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415540-x94v5" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.681891 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-52pf7"] Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.683296 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7xxhg" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.689989 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-gd7w5" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.694698 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vrvkz"] Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.696616 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cjh4d\" (UniqueName: \"kubernetes.io/projected/ad2b0cc2-0295-417f-8122-0db0f8f71400-kube-api-access-cjh4d\") pod \"migrator-59844c95c7-xszbp\" (UID: \"ad2b0cc2-0295-417f-8122-0db0f8f71400\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-xszbp" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.704542 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-kwdb8" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.710600 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:04 crc kubenswrapper[4728]: E1205 11:10:04.710989 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:05.210978449 +0000 UTC m=+139.353101142 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.717254 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-84wsf\" (UniqueName: \"kubernetes.io/projected/8d5bba3b-8ec6-4325-80d2-0d3c24b10987-kube-api-access-84wsf\") pod \"machine-config-operator-74547568cd-g6zpg\" (UID: \"8d5bba3b-8ec6-4325-80d2-0d3c24b10987\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-g6zpg" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.729780 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w268p\" (UniqueName: \"kubernetes.io/projected/e0939ff9-1920-4663-84d4-4cef2e8e3588-kube-api-access-w268p\") pod \"machine-config-server-jr65f\" (UID: \"e0939ff9-1920-4663-84d4-4cef2e8e3588\") " pod="openshift-machine-config-operator/machine-config-server-jr65f" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.754465 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e00af11a-2662-4b44-9140-5c8d3b5f2834-bound-sa-token\") pod \"ingress-operator-5b745b69d9-hqq2x\" (UID: \"e00af11a-2662-4b44-9140-5c8d3b5f2834\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hqq2x" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.790581 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4nb8w\" (UniqueName: \"kubernetes.io/projected/51182eb7-b5c6-4108-95cb-c7835d473ae1-kube-api-access-4nb8w\") pod \"package-server-manager-789f6589d5-7fhtn\" (UID: \"51182eb7-b5c6-4108-95cb-c7835d473ae1\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7fhtn" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.806573 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-q4v9s"] Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.811496 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bpcsw\" (UniqueName: \"kubernetes.io/projected/4b1a43d6-1fb2-48a3-acfe-e656d1dbeff5-kube-api-access-bpcsw\") pod 
\"csi-hostpathplugin-ppfpq\" (UID: \"4b1a43d6-1fb2-48a3-acfe-e656d1dbeff5\") " pod="hostpath-provisioner/csi-hostpathplugin-ppfpq" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.811513 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:10:04 crc kubenswrapper[4728]: E1205 11:10:04.812005 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:05.311982596 +0000 UTC m=+139.454105289 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.812126 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:04 crc kubenswrapper[4728]: E1205 11:10:04.812422 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:05.312407669 +0000 UTC m=+139.454530362 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.833976 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vwb8n\" (UniqueName: \"kubernetes.io/projected/bf109eee-6f22-421a-bcca-c9eda9726830-kube-api-access-vwb8n\") pod \"service-ca-9c57cc56f-gr77k\" (UID: \"bf109eee-6f22-421a-bcca-c9eda9726830\") " pod="openshift-service-ca/service-ca-9c57cc56f-gr77k" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.836435 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-c6s8n" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.838777 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jpc9p"] Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.843003 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-lt4gz" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.853408 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-qr78k"] Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.855021 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-frsxz\" (UniqueName: \"kubernetes.io/projected/8f6033c2-7f61-434b-a2c1-e58530ab4196-kube-api-access-frsxz\") pod \"kube-storage-version-migrator-operator-b67b599dd-c2jd4\" (UID: \"8f6033c2-7f61-434b-a2c1-e58530ab4196\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-c2jd4" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.859827 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-f6bjc"] Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.862237 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-x9m7l"] Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.864864 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-8d796"] Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.872110 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tzm4v"] Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.878453 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8kc96\" (UniqueName: \"kubernetes.io/projected/7f4d6517-ec6f-44c7-bd21-b74438d2b456-kube-api-access-8kc96\") pod \"console-operator-58897d9998-76cfh\" (UID: \"7f4d6517-ec6f-44c7-bd21-b74438d2b456\") " pod="openshift-console-operator/console-operator-58897d9998-76cfh" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.884238 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-c2jd4" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.891750 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c46lg\" (UniqueName: \"kubernetes.io/projected/e00af11a-2662-4b44-9140-5c8d3b5f2834-kube-api-access-c46lg\") pod \"ingress-operator-5b745b69d9-hqq2x\" (UID: \"e00af11a-2662-4b44-9140-5c8d3b5f2834\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hqq2x" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.899034 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-xszbp" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.906070 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-g6zpg" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.912728 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:10:04 crc kubenswrapper[4728]: E1205 11:10:04.912979 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:05.412914641 +0000 UTC m=+139.555037324 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.913172 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:04 crc kubenswrapper[4728]: E1205 11:10:04.913621 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:05.413610362 +0000 UTC m=+139.555733055 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.915665 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hkfk8\" (UniqueName: \"kubernetes.io/projected/f1ff90d9-6a24-416a-a94b-64160dc3d1e2-kube-api-access-hkfk8\") pod \"machine-config-controller-84d6567774-p75qw\" (UID: \"f1ff90d9-6a24-416a-a94b-64160dc3d1e2\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-p75qw" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.934137 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7fhtn" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.934428 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jrjkq\" (UniqueName: \"kubernetes.io/projected/8c8f3b0b-edad-4a06-8374-2d2d3cd805d8-kube-api-access-jrjkq\") pod \"marketplace-operator-79b997595-2clxj\" (UID: \"8c8f3b0b-edad-4a06-8374-2d2d3cd805d8\") " pod="openshift-marketplace/marketplace-operator-79b997595-2clxj" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.941203 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8b7w5"] Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.941668 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.942224 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-2clxj" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.942906 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-8wd9p"] Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.947102 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-lgb9x"] Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.950350 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-4r2zd" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.957606 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.966638 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-gr77k" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.977060 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.994370 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nvfz5"] Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.997354 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-jr65f" Dec 05 11:10:04 crc kubenswrapper[4728]: I1205 11:10:04.997460 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 05 11:10:05 crc kubenswrapper[4728]: I1205 11:10:05.013681 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:10:05 crc kubenswrapper[4728]: E1205 11:10:05.013902 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
Dec 05 11:10:05 crc kubenswrapper[4728]: E1205 11:10:05.013902 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:05.513874926 +0000 UTC m=+139.655997669 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 11:10:05 crc kubenswrapper[4728]: I1205 11:10:05.014169 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8"
Dec 05 11:10:05 crc kubenswrapper[4728]: E1205 11:10:05.014498 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:05.514465484 +0000 UTC m=+139.656588167 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 11:10:05 crc kubenswrapper[4728]: I1205 11:10:05.029183 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-ppfpq"
Dec 05 11:10:05 crc kubenswrapper[4728]: I1205 11:10:05.115002 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 11:10:05 crc kubenswrapper[4728]: E1205 11:10:05.115137 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:05.61512066 +0000 UTC m=+139.757243343 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 11:10:05 crc kubenswrapper[4728]: I1205 11:10:05.115326 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8"
Dec 05 11:10:05 crc kubenswrapper[4728]: E1205 11:10:05.115600 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:05.615592555 +0000 UTC m=+139.757715248 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 11:10:05 crc kubenswrapper[4728]: I1205 11:10:05.122616 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-76cfh"
Dec 05 11:10:05 crc kubenswrapper[4728]: I1205 11:10:05.149533 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hqq2x"
Dec 05 11:10:05 crc kubenswrapper[4728]: I1205 11:10:05.212289 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-p75qw"
Dec 05 11:10:05 crc kubenswrapper[4728]: I1205 11:10:05.216276 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 11:10:05 crc kubenswrapper[4728]: E1205 11:10:05.216437 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:05.716417286 +0000 UTC m=+139.858539979 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 11:10:05 crc kubenswrapper[4728]: I1205 11:10:05.216626 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8"
Dec 05 11:10:05 crc kubenswrapper[4728]: E1205 11:10:05.216948 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:05.716936692 +0000 UTC m=+139.859059385 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 11:10:05 crc kubenswrapper[4728]: I1205 11:10:05.317422 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 11:10:05 crc kubenswrapper[4728]: E1205 11:10:05.317595 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:05.817570887 +0000 UTC m=+139.959693590 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:05 crc kubenswrapper[4728]: I1205 11:10:05.318030 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:05 crc kubenswrapper[4728]: E1205 11:10:05.318390 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:05.818380232 +0000 UTC m=+139.960503095 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:05 crc kubenswrapper[4728]: I1205 11:10:05.330329 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qnmf2" event={"ID":"991dbe07-304e-4bd9-9aca-2b29134cc869","Type":"ContainerStarted","Data":"d0fafe5d47a1c4246fd179f18229a59670855b13d92708ec1358e21975026ce2"} Dec 05 11:10:05 crc kubenswrapper[4728]: I1205 11:10:05.418532 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:10:05 crc kubenswrapper[4728]: E1205 11:10:05.418742 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:05.918722758 +0000 UTC m=+140.060845461 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:05 crc kubenswrapper[4728]: I1205 11:10:05.418842 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:05 crc kubenswrapper[4728]: E1205 11:10:05.419309 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:05.919287716 +0000 UTC m=+140.061410599 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:05 crc kubenswrapper[4728]: I1205 11:10:05.519290 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:10:05 crc kubenswrapper[4728]: E1205 11:10:05.519445 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:06.019418276 +0000 UTC m=+140.161540979 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:05 crc kubenswrapper[4728]: I1205 11:10:05.519505 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:05 crc kubenswrapper[4728]: E1205 11:10:05.519966 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:06.019947012 +0000 UTC m=+140.162069725 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:05 crc kubenswrapper[4728]: I1205 11:10:05.620314 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:10:05 crc kubenswrapper[4728]: E1205 11:10:05.620429 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:06.120415892 +0000 UTC m=+140.262538585 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:05 crc kubenswrapper[4728]: I1205 11:10:05.620622 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:05 crc kubenswrapper[4728]: E1205 11:10:05.621076 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:06.121067372 +0000 UTC m=+140.263190065 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:05 crc kubenswrapper[4728]: I1205 11:10:05.721840 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:10:05 crc kubenswrapper[4728]: E1205 11:10:05.722032 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:06.222008547 +0000 UTC m=+140.364131240 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:05 crc kubenswrapper[4728]: I1205 11:10:05.722317 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:05 crc kubenswrapper[4728]: E1205 11:10:05.722748 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:06.222729729 +0000 UTC m=+140.364852422 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:05 crc kubenswrapper[4728]: I1205 11:10:05.754651 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5gj7b\" (UniqueName: \"kubernetes.io/projected/43ab850c-2ecf-49b9-b0ff-9f49befb53c3-kube-api-access-5gj7b\") pod \"etcd-operator-b45778765-zrtl6\" (UID: \"43ab850c-2ecf-49b9-b0ff-9f49befb53c3\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zrtl6" Dec 05 11:10:05 crc kubenswrapper[4728]: I1205 11:10:05.756100 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-zrtl6" Dec 05 11:10:05 crc kubenswrapper[4728]: W1205 11:10:05.759915 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod049f4ab0_4d3e_45ab_b390_e4c80a919880.slice/crio-3b37797406c86e17bea17cf0210aed8f2becda7e3187b0f9ed4d56094ef98d77 WatchSource:0}: Error finding container 3b37797406c86e17bea17cf0210aed8f2becda7e3187b0f9ed4d56094ef98d77: Status 404 returned error can't find the container with id 3b37797406c86e17bea17cf0210aed8f2becda7e3187b0f9ed4d56094ef98d77 Dec 05 11:10:05 crc kubenswrapper[4728]: I1205 11:10:05.761655 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xp6s5\" (UniqueName: \"kubernetes.io/projected/26d19364-b381-49b6-bf41-9cfe831484f1-kube-api-access-xp6s5\") pod \"packageserver-d55dfcdfc-vh9hw\" (UID: \"26d19364-b381-49b6-bf41-9cfe831484f1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vh9hw" Dec 05 11:10:05 crc kubenswrapper[4728]: W1205 11:10:05.771630 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod985a876e_5d4e_4904_85c6_f10945d269cd.slice/crio-00af138ff9840b54f0195b1dd8055d075c43ea37f6ae245903917dff8de4d83e WatchSource:0}: Error finding container 00af138ff9840b54f0195b1dd8055d075c43ea37f6ae245903917dff8de4d83e: Status 404 returned error can't find the container with id 00af138ff9840b54f0195b1dd8055d075c43ea37f6ae245903917dff8de4d83e Dec 05 11:10:05 crc kubenswrapper[4728]: W1205 11:10:05.781946 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf35e95aa_4acc_4b87_a673_1e22826ebe22.slice/crio-e158f2387666d6d461e094cc1b1e5f6be31ad0c90ed12f44836f64d5308146b0 WatchSource:0}: Error finding container e158f2387666d6d461e094cc1b1e5f6be31ad0c90ed12f44836f64d5308146b0: Status 404 returned error can't find the container with id e158f2387666d6d461e094cc1b1e5f6be31ad0c90ed12f44836f64d5308146b0 Dec 05 11:10:05 crc kubenswrapper[4728]: W1205 11:10:05.789179 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5e51975c_42dd_458c_a52c_2f8cf11810cf.slice/crio-ec4e48e93c65b054a89d4348233b48b7349509cfe7a3e371de971e56ac134834 WatchSource:0}: Error finding container ec4e48e93c65b054a89d4348233b48b7349509cfe7a3e371de971e56ac134834: Status 404 returned error can't find the container with id ec4e48e93c65b054a89d4348233b48b7349509cfe7a3e371de971e56ac134834 Dec 05 11:10:05 crc kubenswrapper[4728]: W1205 11:10:05.792492 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6c786bc2_e4d2_4402_a944_d21132d6087b.slice/crio-e20eb83eadcce7bafe932a616c3c899c68f4d2be1e5a05eac4935bdfdb5baccb WatchSource:0}: Error finding container e20eb83eadcce7bafe932a616c3c899c68f4d2be1e5a05eac4935bdfdb5baccb: Status 404 returned error can't find the container with id e20eb83eadcce7bafe932a616c3c899c68f4d2be1e5a05eac4935bdfdb5baccb Dec 05 11:10:05 crc kubenswrapper[4728]: W1205 11:10:05.796519 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd92fe6c3_10f5_4151_86cb_236a4c79463b.slice/crio-c26b67a977c20de5a4193e632d3e419928cac1abce3a721d076ec77c80de27a0 WatchSource:0}: Error finding container 
c26b67a977c20de5a4193e632d3e419928cac1abce3a721d076ec77c80de27a0: Status 404 returned error can't find the container with id c26b67a977c20de5a4193e632d3e419928cac1abce3a721d076ec77c80de27a0 Dec 05 11:10:05 crc kubenswrapper[4728]: W1205 11:10:05.807838 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb21b7f08_1b74_4ed3_8a78_f6a03b514069.slice/crio-eba7a64756250960325a7272cdfa7ef849b5f99cf61a212cd5686ec96af41819 WatchSource:0}: Error finding container eba7a64756250960325a7272cdfa7ef849b5f99cf61a212cd5686ec96af41819: Status 404 returned error can't find the container with id eba7a64756250960325a7272cdfa7ef849b5f99cf61a212cd5686ec96af41819 Dec 05 11:10:05 crc kubenswrapper[4728]: W1205 11:10:05.811958 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podac5d7238_f656_4446_9620_49a1ea4f677f.slice/crio-5e1aefbb6404f43bd1ee1763114f1a514d731a512b4f7721287ac9ec1a799162 WatchSource:0}: Error finding container 5e1aefbb6404f43bd1ee1763114f1a514d731a512b4f7721287ac9ec1a799162: Status 404 returned error can't find the container with id 5e1aefbb6404f43bd1ee1763114f1a514d731a512b4f7721287ac9ec1a799162 Dec 05 11:10:05 crc kubenswrapper[4728]: I1205 11:10:05.823050 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:10:05 crc kubenswrapper[4728]: E1205 11:10:05.823437 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:06.323396665 +0000 UTC m=+140.465519358 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:05 crc kubenswrapper[4728]: I1205 11:10:05.823526 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:05 crc kubenswrapper[4728]: E1205 11:10:05.825145 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:06.325134719 +0000 UTC m=+140.467257412 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:05 crc kubenswrapper[4728]: I1205 11:10:05.859272 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vh9hw" Dec 05 11:10:05 crc kubenswrapper[4728]: I1205 11:10:05.924501 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:10:05 crc kubenswrapper[4728]: E1205 11:10:05.924979 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:06.424959029 +0000 UTC m=+140.567081722 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:05 crc kubenswrapper[4728]: W1205 11:10:05.958866 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0cbb9d62_be02_4410_bdc2_c0179576d8ed.slice/crio-a63c02d68559ac52ff05e1ca7384b996699727c21a25134405a45146f838bb1d WatchSource:0}: Error finding container a63c02d68559ac52ff05e1ca7384b996699727c21a25134405a45146f838bb1d: Status 404 returned error can't find the container with id a63c02d68559ac52ff05e1ca7384b996699727c21a25134405a45146f838bb1d Dec 05 11:10:06 crc kubenswrapper[4728]: I1205 11:10:06.016749 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-f7jqh"] Dec 05 11:10:06 crc kubenswrapper[4728]: I1205 11:10:06.027255 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:06 crc kubenswrapper[4728]: E1205 11:10:06.027590 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:06.527573405 +0000 UTC m=+140.669696098 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:06 crc kubenswrapper[4728]: I1205 11:10:06.042862 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-p75qw"] Dec 05 11:10:06 crc kubenswrapper[4728]: I1205 11:10:06.128416 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:10:06 crc kubenswrapper[4728]: E1205 11:10:06.129029 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:06.629013635 +0000 UTC m=+140.771136328 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:06 crc kubenswrapper[4728]: I1205 11:10:06.230765 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:06 crc kubenswrapper[4728]: E1205 11:10:06.231433 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:06.731422595 +0000 UTC m=+140.873545278 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:06 crc kubenswrapper[4728]: W1205 11:10:06.319669 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf1ff90d9_6a24_416a_a94b_64160dc3d1e2.slice/crio-2125353ea2037d4ab6ea640863ff1ef2f09f3a1ffd0d599b66cc887c66c951d0 WatchSource:0}: Error finding container 2125353ea2037d4ab6ea640863ff1ef2f09f3a1ffd0d599b66cc887c66c951d0: Status 404 returned error can't find the container with id 2125353ea2037d4ab6ea640863ff1ef2f09f3a1ffd0d599b66cc887c66c951d0 Dec 05 11:10:06 crc kubenswrapper[4728]: I1205 11:10:06.327448 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-vllpv"] Dec 05 11:10:06 crc kubenswrapper[4728]: I1205 11:10:06.331684 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:10:06 crc kubenswrapper[4728]: E1205 11:10:06.331779 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:06.831761641 +0000 UTC m=+140.973884334 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:06 crc kubenswrapper[4728]: I1205 11:10:06.331977 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:06 crc kubenswrapper[4728]: E1205 11:10:06.332328 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:06.832318318 +0000 UTC m=+140.974441011 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:06 crc kubenswrapper[4728]: I1205 11:10:06.343132 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-f6bjc" event={"ID":"fda4dac4-0200-4740-a9c1-c3897809c2c0","Type":"ContainerStarted","Data":"9ed360310291c085deb9e0eee4983d0af107074582cc1cca853c83e4bccaf740"} Dec 05 11:10:06 crc kubenswrapper[4728]: I1205 11:10:06.345893 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pq24q" event={"ID":"40a0ccb9-a83b-4836-8864-5687d054d330","Type":"ContainerStarted","Data":"e3791ed4b7ccc1ea319a41ac1569c1b1f459fab7d45496bcd6412449220379d6"} Dec 05 11:10:06 crc kubenswrapper[4728]: I1205 11:10:06.346859 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vrvkz" event={"ID":"e4952f54-bca0-4cae-b32a-f9d8cb0bb91c","Type":"ContainerStarted","Data":"472c2625b07bf62f6274d4e2d4706bbaac8246ce74946a99bf98a11f9d75c70b"} Dec 05 11:10:06 crc kubenswrapper[4728]: I1205 11:10:06.348102 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jpc9p" event={"ID":"f35e95aa-4acc-4b87-a673-1e22826ebe22","Type":"ContainerStarted","Data":"e158f2387666d6d461e094cc1b1e5f6be31ad0c90ed12f44836f64d5308146b0"} Dec 05 11:10:06 crc kubenswrapper[4728]: I1205 11:10:06.350610 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8b7w5" event={"ID":"1c8c9c9a-5889-46cd-a366-122310015aa3","Type":"ContainerStarted","Data":"e8e77b8b2b2aeec94c57ca07ad3d4a89ab5aaa18448d469baef5cd35a2c1e5fb"} Dec 05 11:10:06 crc kubenswrapper[4728]: I1205 11:10:06.352815 4728 generic.go:334] "Generic (PLEG): container finished" podID="132479f7-af34-420b-821f-34c11e07b06e" containerID="5efae2ce8ba67427cb62f165f6329e39ee5b5dd20fa5f1b0131a8954a977584f" exitCode=0 Dec 05 11:10:06 crc kubenswrapper[4728]: I1205 11:10:06.404785 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-pdjw2" event={"ID":"132479f7-af34-420b-821f-34c11e07b06e","Type":"ContainerDied","Data":"5efae2ce8ba67427cb62f165f6329e39ee5b5dd20fa5f1b0131a8954a977584f"} Dec 05 11:10:06 crc kubenswrapper[4728]: I1205 11:10:06.404867 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8wd9p" event={"ID":"4c3990f9-726a-43a5-a84b-eea529806652","Type":"ContainerStarted","Data":"bff20075b8c57078483ca1d496abecb3a087010d4b35c9508f6c4a423346f968"} Dec 05 11:10:06 crc kubenswrapper[4728]: I1205 11:10:06.404888 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qnmf2" Dec 05 11:10:06 crc kubenswrapper[4728]: I1205 11:10:06.404907 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8d796" 
event={"ID":"6c786bc2-e4d2-4402-a944-d21132d6087b","Type":"ContainerStarted","Data":"e20eb83eadcce7bafe932a616c3c899c68f4d2be1e5a05eac4935bdfdb5baccb"} Dec 05 11:10:06 crc kubenswrapper[4728]: I1205 11:10:06.404918 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nvfz5" event={"ID":"ac5d7238-f656-4446-9620-49a1ea4f677f","Type":"ContainerStarted","Data":"5e1aefbb6404f43bd1ee1763114f1a514d731a512b4f7721287ac9ec1a799162"} Dec 05 11:10:06 crc kubenswrapper[4728]: I1205 11:10:06.404928 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-f7jqh" event={"ID":"1ce2658a-0a84-40c6-8e42-e83736811aa1","Type":"ContainerStarted","Data":"1d00cc04d52ae7ee6f0be6063fd5733942731df924cf3e57f121f1c5b2178015"} Dec 05 11:10:06 crc kubenswrapper[4728]: I1205 11:10:06.404937 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-q4v9s" event={"ID":"985a876e-5d4e-4904-85c6-f10945d269cd","Type":"ContainerStarted","Data":"00af138ff9840b54f0195b1dd8055d075c43ea37f6ae245903917dff8de4d83e"} Dec 05 11:10:06 crc kubenswrapper[4728]: I1205 11:10:06.404946 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-p75qw" event={"ID":"f1ff90d9-6a24-416a-a94b-64160dc3d1e2","Type":"ContainerStarted","Data":"2125353ea2037d4ab6ea640863ff1ef2f09f3a1ffd0d599b66cc887c66c951d0"} Dec 05 11:10:06 crc kubenswrapper[4728]: I1205 11:10:06.404956 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-jr65f" event={"ID":"e0939ff9-1920-4663-84d4-4cef2e8e3588","Type":"ContainerStarted","Data":"df574c8cfea1180f901897d4164244c45461e6796760b26f4795bce25a99d908"} Dec 05 11:10:06 crc kubenswrapper[4728]: I1205 11:10:06.404965 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" event={"ID":"d92fe6c3-10f5-4151-86cb-236a4c79463b","Type":"ContainerStarted","Data":"c26b67a977c20de5a4193e632d3e419928cac1abce3a721d076ec77c80de27a0"} Dec 05 11:10:06 crc kubenswrapper[4728]: I1205 11:10:06.404974 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-52pf7" event={"ID":"049f4ab0-4d3e-45ab-b390-e4c80a919880","Type":"ContainerStarted","Data":"3b37797406c86e17bea17cf0210aed8f2becda7e3187b0f9ed4d56094ef98d77"} Dec 05 11:10:06 crc kubenswrapper[4728]: I1205 11:10:06.404983 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qnmf2" event={"ID":"991dbe07-304e-4bd9-9aca-2b29134cc869","Type":"ContainerStarted","Data":"c6e728070843e73bbe7f9c6d28fc805ab68b553575998435e221af8d30b7c317"} Dec 05 11:10:06 crc kubenswrapper[4728]: I1205 11:10:06.404992 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-lgb9x" event={"ID":"b21b7f08-1b74-4ed3-8a78-f6a03b514069","Type":"ContainerStarted","Data":"eba7a64756250960325a7272cdfa7ef849b5f99cf61a212cd5686ec96af41819"} Dec 05 11:10:06 crc kubenswrapper[4728]: I1205 11:10:06.405001 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-c6s8n" 
event={"ID":"0cbb9d62-be02-4410-bdc2-c0179576d8ed","Type":"ContainerStarted","Data":"a63c02d68559ac52ff05e1ca7384b996699727c21a25134405a45146f838bb1d"} Dec 05 11:10:06 crc kubenswrapper[4728]: I1205 11:10:06.405010 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-qr78k" event={"ID":"5e51975c-42dd-458c-a52c-2f8cf11810cf","Type":"ContainerStarted","Data":"ec4e48e93c65b054a89d4348233b48b7349509cfe7a3e371de971e56ac134834"} Dec 05 11:10:06 crc kubenswrapper[4728]: I1205 11:10:06.405503 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tzm4v" event={"ID":"9ead179c-fe6a-47fd-a4b8-6af96faff785","Type":"ContainerStarted","Data":"acfd6e13f86f68e149fda3e9c66cbb80f10d180bd96efd228e522ec1ac9600e0"} Dec 05 11:10:06 crc kubenswrapper[4728]: I1205 11:10:06.433292 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:10:06 crc kubenswrapper[4728]: E1205 11:10:06.433445 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:06.933420358 +0000 UTC m=+141.075543051 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:06 crc kubenswrapper[4728]: I1205 11:10:06.433608 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:06 crc kubenswrapper[4728]: E1205 11:10:06.433921 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:06.933914173 +0000 UTC m=+141.076036866 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:06 crc kubenswrapper[4728]: I1205 11:10:06.515904 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qnmf2" Dec 05 11:10:06 crc kubenswrapper[4728]: I1205 11:10:06.535329 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:10:06 crc kubenswrapper[4728]: E1205 11:10:06.535528 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:07.035494938 +0000 UTC m=+141.177617631 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:06 crc kubenswrapper[4728]: I1205 11:10:06.536042 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:06 crc kubenswrapper[4728]: E1205 11:10:06.536346 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:07.036337713 +0000 UTC m=+141.178460586 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:06 crc kubenswrapper[4728]: I1205 11:10:06.632323 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qnmf2" podStartSLOduration=120.627645293 podStartE2EDuration="2m0.627645293s" podCreationTimestamp="2025-12-05 11:08:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:10:06.598585492 +0000 UTC m=+140.740708215" watchObservedRunningTime="2025-12-05 11:10:06.627645293 +0000 UTC m=+140.769768006" Dec 05 11:10:06 crc kubenswrapper[4728]: I1205 11:10:06.638553 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:10:06 crc kubenswrapper[4728]: E1205 11:10:06.638910 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:07.138777684 +0000 UTC m=+141.280900377 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:06 crc kubenswrapper[4728]: I1205 11:10:06.639356 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:06 crc kubenswrapper[4728]: E1205 11:10:06.639809 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:07.139780825 +0000 UTC m=+141.281903518 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:06 crc kubenswrapper[4728]: I1205 11:10:06.740996 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:10:06 crc kubenswrapper[4728]: E1205 11:10:06.741370 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:07.241352969 +0000 UTC m=+141.383475662 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:06 crc kubenswrapper[4728]: I1205 11:10:06.842964 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:06 crc kubenswrapper[4728]: E1205 11:10:06.844155 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:07.3441357 +0000 UTC m=+141.486258563 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:06 crc kubenswrapper[4728]: I1205 11:10:06.944178 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:10:06 crc kubenswrapper[4728]: E1205 11:10:06.944565 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:07.444526998 +0000 UTC m=+141.586649691 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:06 crc kubenswrapper[4728]: I1205 11:10:06.944967 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:06 crc kubenswrapper[4728]: E1205 11:10:06.947330 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:07.447285563 +0000 UTC m=+141.589408256 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:07 crc kubenswrapper[4728]: I1205 11:10:07.046868 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:10:07 crc kubenswrapper[4728]: E1205 11:10:07.047975 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:07.547951139 +0000 UTC m=+141.690073832 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:07 crc kubenswrapper[4728]: I1205 11:10:07.166915 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:07 crc kubenswrapper[4728]: E1205 11:10:07.167291 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:07.667275767 +0000 UTC m=+141.809398460 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 11:10:07 crc kubenswrapper[4728]: I1205 11:10:07.307219    4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 11:10:07 crc kubenswrapper[4728]: E1205 11:10:07.307330    4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:07.807309431 +0000 UTC m=+141.949432124 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 11:10:07 crc kubenswrapper[4728]: I1205 11:10:07.307489    4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8"
Dec 05 11:10:07 crc kubenswrapper[4728]: E1205 11:10:07.307956    4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:07.80794736 +0000 UTC m=+141.950070053 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 11:10:07 crc kubenswrapper[4728]: I1205 11:10:07.412915    4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 11:10:07 crc kubenswrapper[4728]: E1205 11:10:07.413565    4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:07.913545658 +0000 UTC m=+142.055668351 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 11:10:07 crc kubenswrapper[4728]: I1205 11:10:07.426551    4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-lgb9x" event={"ID":"b21b7f08-1b74-4ed3-8a78-f6a03b514069","Type":"ContainerStarted","Data":"acfc80b2aa57e1577caa0c30d68ecce9e2bc61e486206ddf663295e070372522"}
Dec 05 11:10:07 crc kubenswrapper[4728]: I1205 11:10:07.427584    4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-lgb9x"
Dec 05 11:10:07 crc kubenswrapper[4728]: I1205 11:10:07.437867    4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-c6s8n" event={"ID":"0cbb9d62-be02-4410-bdc2-c0179576d8ed","Type":"ContainerStarted","Data":"d77a4d9dfd35269c8d0ae4b789492239ae1253558b2c2d55bf7002adcc502d11"}
Dec 05 11:10:07 crc kubenswrapper[4728]: I1205 11:10:07.438057    4728 patch_prober.go:28] interesting pod/downloads-7954f5f757-lgb9x container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body=
Dec 05 11:10:07 crc kubenswrapper[4728]: I1205 11:10:07.438085    4728 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-lgb9x" podUID="b21b7f08-1b74-4ed3-8a78-f6a03b514069" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused"
Dec 05 11:10:07 crc kubenswrapper[4728]: I1205 11:10:07.467374    4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-qr78k" event={"ID":"5e51975c-42dd-458c-a52c-2f8cf11810cf","Type":"ContainerStarted","Data":"02423886a263680cd83fb2403498491dc3f44cf17f7f46cf09eacc128fd05bd7"}
event={"ID":"5e51975c-42dd-458c-a52c-2f8cf11810cf","Type":"ContainerStarted","Data":"02423886a263680cd83fb2403498491dc3f44cf17f7f46cf09eacc128fd05bd7"} Dec 05 11:10:07 crc kubenswrapper[4728]: I1205 11:10:07.475173 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-lgb9x" podStartSLOduration=121.475144016 podStartE2EDuration="2m1.475144016s" podCreationTimestamp="2025-12-05 11:08:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:10:07.468971517 +0000 UTC m=+141.611094230" watchObservedRunningTime="2025-12-05 11:10:07.475144016 +0000 UTC m=+141.617266739" Dec 05 11:10:07 crc kubenswrapper[4728]: I1205 11:10:07.491136 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tzm4v" event={"ID":"9ead179c-fe6a-47fd-a4b8-6af96faff785","Type":"ContainerStarted","Data":"d0d2a851ccf2fc1d2c9e4d069d659d39fa7061f85d9fecfa3e3957343a318787"} Dec 05 11:10:07 crc kubenswrapper[4728]: I1205 11:10:07.505399 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-p75qw" event={"ID":"f1ff90d9-6a24-416a-a94b-64160dc3d1e2","Type":"ContainerStarted","Data":"9328c9881506f27e990cd2b5792506b9766d5c5254613e69229d18a9e4c35184"} Dec 05 11:10:07 crc kubenswrapper[4728]: I1205 11:10:07.514573 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jpc9p" event={"ID":"f35e95aa-4acc-4b87-a673-1e22826ebe22","Type":"ContainerStarted","Data":"65b79c5d8f0282ab01b72074cdb2061cd87c398d47483c047ad2af16d5186265"} Dec 05 11:10:07 crc kubenswrapper[4728]: I1205 11:10:07.514638 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:07 crc kubenswrapper[4728]: E1205 11:10:07.516197 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:08.016184404 +0000 UTC m=+142.158307097 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:07 crc kubenswrapper[4728]: I1205 11:10:07.522119 4728 generic.go:334] "Generic (PLEG): container finished" podID="4c3990f9-726a-43a5-a84b-eea529806652" containerID="73ba7e9edce3a3b6d5a1feb7f12eebbce015b60e65e4b141dcdbd8e3c5d8e8fc" exitCode=0 Dec 05 11:10:07 crc kubenswrapper[4728]: I1205 11:10:07.522200 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8wd9p" event={"ID":"4c3990f9-726a-43a5-a84b-eea529806652","Type":"ContainerDied","Data":"73ba7e9edce3a3b6d5a1feb7f12eebbce015b60e65e4b141dcdbd8e3c5d8e8fc"} Dec 05 11:10:07 crc kubenswrapper[4728]: I1205 11:10:07.526651 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-vllpv" event={"ID":"f4d671a2-6454-4bf6-a099-0c0e15de2f20","Type":"ContainerStarted","Data":"f3aa13b8cd58487e85e38c713438f68b04d233b42a1200d4769698a0b7597564"} Dec 05 11:10:07 crc kubenswrapper[4728]: I1205 11:10:07.529713 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vrvkz" event={"ID":"e4952f54-bca0-4cae-b32a-f9d8cb0bb91c","Type":"ContainerStarted","Data":"797e4ab74eae71bf0d1cd07363b054721c39e095c257b37bb7851fc8f6c5d735"} Dec 05 11:10:07 crc kubenswrapper[4728]: I1205 11:10:07.533594 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8b7w5" event={"ID":"1c8c9c9a-5889-46cd-a366-122310015aa3","Type":"ContainerStarted","Data":"942bd229043b281a180aebbf07e00471bf6f134287e50c63e5d8fb510bc964e5"} Dec 05 11:10:07 crc kubenswrapper[4728]: I1205 11:10:07.545038 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-qr78k" podStartSLOduration=122.545022928 podStartE2EDuration="2m2.545022928s" podCreationTimestamp="2025-12-05 11:08:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:10:07.544564474 +0000 UTC m=+141.686687177" watchObservedRunningTime="2025-12-05 11:10:07.545022928 +0000 UTC m=+141.687145631" Dec 05 11:10:07 crc kubenswrapper[4728]: I1205 11:10:07.566632 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-52pf7" event={"ID":"049f4ab0-4d3e-45ab-b390-e4c80a919880","Type":"ContainerStarted","Data":"8324ca88cc1607deceff1034311a2ecb0bf902f2b8e7606e865bddfcdc933c5d"} Dec 05 11:10:07 crc kubenswrapper[4728]: I1205 11:10:07.569106 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-xszbp"] Dec 05 11:10:07 crc kubenswrapper[4728]: I1205 11:10:07.569535 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-52pf7" Dec 05 11:10:07 crc kubenswrapper[4728]: I1205 11:10:07.572921 4728 patch_prober.go:28] interesting 
Dec 05 11:10:07 crc kubenswrapper[4728]: I1205 11:10:07.572971    4728 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-52pf7" podUID="049f4ab0-4d3e-45ab-b390-e4c80a919880" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.7:8443/healthz\": dial tcp 10.217.0.7:8443: connect: connection refused"
Dec 05 11:10:07 crc kubenswrapper[4728]: I1205 11:10:07.603435    4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-c6s8n" podStartSLOduration=121.603417388 podStartE2EDuration="2m1.603417388s" podCreationTimestamp="2025-12-05 11:08:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:10:07.580021261 +0000 UTC m=+141.722143964" watchObservedRunningTime="2025-12-05 11:10:07.603417388 +0000 UTC m=+141.745540091"
Dec 05 11:10:07 crc kubenswrapper[4728]: I1205 11:10:07.604413    4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7xxhg"]
Dec 05 11:10:07 crc kubenswrapper[4728]: I1205 11:10:07.615214    4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 11:10:07 crc kubenswrapper[4728]: E1205 11:10:07.615662    4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:08.115622852 +0000 UTC m=+142.257745545 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 11:10:07 crc kubenswrapper[4728]: I1205 11:10:07.616160    4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8"
Dec 05 11:10:07 crc kubenswrapper[4728]: E1205 11:10:07.616410    4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:08.116399196 +0000 UTC m=+142.258521889 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 11:10:07 crc kubenswrapper[4728]: I1205 11:10:07.662630    4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jpc9p" podStartSLOduration=121.662612523 podStartE2EDuration="2m1.662612523s" podCreationTimestamp="2025-12-05 11:08:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:10:07.642708913 +0000 UTC m=+141.784831606" watchObservedRunningTime="2025-12-05 11:10:07.662612523 +0000 UTC m=+141.804735216"
Dec 05 11:10:07 crc kubenswrapper[4728]: I1205 11:10:07.670385    4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-f6bjc" event={"ID":"fda4dac4-0200-4740-a9c1-c3897809c2c0","Type":"ContainerStarted","Data":"cfb91ae6e2ab001db5c34c90c9b66451fe8660e70c9175e87849763f87fbb0dd"}
Dec 05 11:10:07 crc kubenswrapper[4728]: I1205 11:10:07.724807    4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-gd7w5"]
Dec 05 11:10:07 crc kubenswrapper[4728]: I1205 11:10:07.725089    4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-8z7kn"]
Dec 05 11:10:07 crc kubenswrapper[4728]: I1205 11:10:07.726844    4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 11:10:07 crc kubenswrapper[4728]: E1205 11:10:07.727136    4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:08.227121561 +0000 UTC m=+142.369244254 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 11:10:07 crc kubenswrapper[4728]: I1205 11:10:07.742245    4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-424hr"]
Dec 05 11:10:07 crc kubenswrapper[4728]: I1205 11:10:07.744914    4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-4r2zd"]
Dec 05 11:10:07 crc kubenswrapper[4728]: I1205 11:10:07.754984    4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8b7w5" podStartSLOduration=121.754963514 podStartE2EDuration="2m1.754963514s" podCreationTimestamp="2025-12-05 11:08:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:10:07.696568984 +0000 UTC m=+141.838691697" watchObservedRunningTime="2025-12-05 11:10:07.754963514 +0000 UTC m=+141.897086207"
Dec 05 11:10:07 crc kubenswrapper[4728]: I1205 11:10:07.779858    4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-52pf7" podStartSLOduration=121.779839437 podStartE2EDuration="2m1.779839437s" podCreationTimestamp="2025-12-05 11:08:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:10:07.744826964 +0000 UTC m=+141.886949667" watchObservedRunningTime="2025-12-05 11:10:07.779839437 +0000 UTC m=+141.921962150"
Dec 05 11:10:07 crc kubenswrapper[4728]: I1205 11:10:07.780588    4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tzm4v" podStartSLOduration=122.78058297 podStartE2EDuration="2m2.78058297s" podCreationTimestamp="2025-12-05 11:08:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:10:07.775412761 +0000 UTC m=+141.917535464" watchObservedRunningTime="2025-12-05 11:10:07.78058297 +0000 UTC m=+141.922705663"
Dec 05 11:10:07 crc kubenswrapper[4728]: I1205 11:10:07.785405    4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5x4xq"]
Dec 05 11:10:07 crc kubenswrapper[4728]: I1205 11:10:07.804604    4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-g6zpg"]
Dec 05 11:10:07 crc kubenswrapper[4728]: W1205 11:10:07.804967    4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod709fe194_7202_4841_a79c_1bd440f108d2.slice/crio-0f7cc317222f1700aaaeec6c79e55e18282fe3265741bea51adb08b435d19832 WatchSource:0}: Error finding container 0f7cc317222f1700aaaeec6c79e55e18282fe3265741bea51adb08b435d19832: Status 404 returned error can't find the container with id 0f7cc317222f1700aaaeec6c79e55e18282fe3265741bea51adb08b435d19832
Dec 05 11:10:07 crc kubenswrapper[4728]: I1205 11:10:07.814471    4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-f6bjc" podStartSLOduration=121.814452398 podStartE2EDuration="2m1.814452398s" podCreationTimestamp="2025-12-05 11:08:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:10:07.80635729 +0000 UTC m=+141.948480003" watchObservedRunningTime="2025-12-05 11:10:07.814452398 +0000 UTC m=+141.956575091"
Dec 05 11:10:07 crc kubenswrapper[4728]: I1205 11:10:07.816017    4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415540-x94v5"]
Dec 05 11:10:07 crc kubenswrapper[4728]: I1205 11:10:07.823588    4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-kwdb8"]
Dec 05 11:10:07 crc kubenswrapper[4728]: I1205 11:10:07.825905    4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-76cfh"]
Dec 05 11:10:07 crc kubenswrapper[4728]: I1205 11:10:07.828251    4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-lt4gz"]
Dec 05 11:10:07 crc kubenswrapper[4728]: I1205 11:10:07.828983    4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8"
Dec 05 11:10:07 crc kubenswrapper[4728]: E1205 11:10:07.832076    4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:08.332061678 +0000 UTC m=+142.474184371 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 11:10:07 crc kubenswrapper[4728]: I1205 11:10:07.837630    4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-c6s8n"
Dec 05 11:10:07 crc kubenswrapper[4728]: I1205 11:10:07.859204    4728 patch_prober.go:28] interesting pod/router-default-5444994796-c6s8n container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 05 11:10:07 crc kubenswrapper[4728]: [-]has-synced failed: reason withheld
Dec 05 11:10:07 crc kubenswrapper[4728]: [+]process-running ok
Dec 05 11:10:07 crc kubenswrapper[4728]: healthz check failed
Dec 05 11:10:07 crc kubenswrapper[4728]: I1205 11:10:07.859439    4728 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-c6s8n" podUID="0cbb9d62-be02-4410-bdc2-c0179576d8ed" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 05 11:10:07 crc kubenswrapper[4728]: I1205 11:10:07.868160    4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-zrtl6"]
Dec 05 11:10:07 crc kubenswrapper[4728]: I1205 11:10:07.876544    4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-hqq2x"]
Dec 05 11:10:07 crc kubenswrapper[4728]: W1205 11:10:07.890049    4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda3b08810_f132_4eae_99fa_5a68c197e52b.slice/crio-88afed137a7dd7cf134c7c5c6e53f7c83ab430601428a359dc4287f59dceb47d WatchSource:0}: Error finding container 88afed137a7dd7cf134c7c5c6e53f7c83ab430601428a359dc4287f59dceb47d: Status 404 returned error can't find the container with id 88afed137a7dd7cf134c7c5c6e53f7c83ab430601428a359dc4287f59dceb47d
Dec 05 11:10:07 crc kubenswrapper[4728]: W1205 11:10:07.893715    4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod43ab850c_2ecf_49b9_b0ff_9f49befb53c3.slice/crio-46793633786ea3940fee0cc8e20ab536ffe9914c6a37536b138edfc3bc96305b WatchSource:0}: Error finding container 46793633786ea3940fee0cc8e20ab536ffe9914c6a37536b138edfc3bc96305b: Status 404 returned error can't find the container with id 46793633786ea3940fee0cc8e20ab536ffe9914c6a37536b138edfc3bc96305b
Dec 05 11:10:07 crc kubenswrapper[4728]: I1205 11:10:07.933019    4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 11:10:07 crc kubenswrapper[4728]: E1205 11:10:07.933899    4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:08.433866119 +0000 UTC m=+142.575988812 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 11:10:08 crc kubenswrapper[4728]: I1205 11:10:08.005414    4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-ppfpq"]
Dec 05 11:10:08 crc kubenswrapper[4728]: I1205 11:10:08.036304    4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8"
Dec 05 11:10:08 crc kubenswrapper[4728]: E1205 11:10:08.036722    4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:08.536710492 +0000 UTC m=+142.678833185 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 11:10:08 crc kubenswrapper[4728]: I1205 11:10:08.092004    4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-c2jd4"]
Dec 05 11:10:08 crc kubenswrapper[4728]: I1205 11:10:08.103462    4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7fhtn"]
Dec 05 11:10:08 crc kubenswrapper[4728]: I1205 11:10:08.117695    4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-2clxj"]
Dec 05 11:10:08 crc kubenswrapper[4728]: W1205 11:10:08.121936    4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4b1a43d6_1fb2_48a3_acfe_e656d1dbeff5.slice/crio-3e58256a8115290a67afa486edcaf94db5e391989feb1a6d6b88a5f4ef530e25 WatchSource:0}: Error finding container 3e58256a8115290a67afa486edcaf94db5e391989feb1a6d6b88a5f4ef530e25: Status 404 returned error can't find the container with id 3e58256a8115290a67afa486edcaf94db5e391989feb1a6d6b88a5f4ef530e25
Dec 05 11:10:08 crc kubenswrapper[4728]: I1205 11:10:08.134368    4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-gr77k"]
Dec 05 11:10:08 crc kubenswrapper[4728]: I1205 11:10:08.137455    4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
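Note the SyncLoop UPDATE for hostpath-provisioner/csi-hostpathplugin-ppfpq just above: the mounts keep failing because a node-level CSI driver only enters the kubelet's "list of registered CSI drivers" once its node plugin registers over the kubelet's plugin-registration socket, and that plugin pod is only now arriving. Whether the driver exists at the cluster level can be checked against the storage.k8s.io/v1 CSIDriver API; a minimal client-go sketch, assuming in-cluster credentials (not something this log shows being run):

    package main

    import (
        "context"
        "fmt"

        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/rest"
    )

    func main() {
        cfg, err := rest.InClusterConfig() // assumes this runs inside a pod
        if err != nil {
            panic(err)
        }
        cs, err := kubernetes.NewForConfig(cfg)
        if err != nil {
            panic(err)
        }
        // List the CSIDriver objects the API server knows about. Node-level
        // registration -- the thing the kubelet error above is actually
        // checking -- is a separate per-node handshake over the sockets in
        // /var/lib/kubelet/plugins_registry, so a driver can exist here and
        // still be unregistered on a given node.
        drivers, err := cs.StorageV1().CSIDrivers().List(context.TODO(), metav1.ListOptions{})
        if err != nil {
            panic(err)
        }
        for _, d := range drivers.Items {
            fmt.Println(d.Name) // expect kubevirt.io.hostpath-provisioner once deployed
        }
    }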
\"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:10:08 crc kubenswrapper[4728]: E1205 11:10:08.137898 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:08.637875464 +0000 UTC m=+142.779998157 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:08 crc kubenswrapper[4728]: I1205 11:10:08.195508 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vh9hw"] Dec 05 11:10:08 crc kubenswrapper[4728]: I1205 11:10:08.240178 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:08 crc kubenswrapper[4728]: E1205 11:10:08.240564 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:08.740549632 +0000 UTC m=+142.882672325 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:08 crc kubenswrapper[4728]: I1205 11:10:08.340960 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:10:08 crc kubenswrapper[4728]: E1205 11:10:08.341335 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:08.841321492 +0000 UTC m=+142.983444185 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:08 crc kubenswrapper[4728]: I1205 11:10:08.341912 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:08 crc kubenswrapper[4728]: E1205 11:10:08.342155 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:08.842146157 +0000 UTC m=+142.984268850 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:08 crc kubenswrapper[4728]: I1205 11:10:08.442960 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:10:08 crc kubenswrapper[4728]: E1205 11:10:08.443541 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:08.943525445 +0000 UTC m=+143.085648138 (durationBeforeRetry 500ms). 
Dec 05 11:10:08 crc kubenswrapper[4728]: I1205 11:10:08.547023    4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8"
Dec 05 11:10:08 crc kubenswrapper[4728]: E1205 11:10:08.547393    4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:09.047377999 +0000 UTC m=+143.189500692 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 11:10:08 crc kubenswrapper[4728]: I1205 11:10:08.655457    4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 11:10:08 crc kubenswrapper[4728]: E1205 11:10:08.655615    4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:09.155588707 +0000 UTC m=+143.297711400 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 11:10:08 crc kubenswrapper[4728]: I1205 11:10:08.656092    4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8"
Dec 05 11:10:08 crc kubenswrapper[4728]: E1205 11:10:08.656384    4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:09.156372761 +0000 UTC m=+143.298495454 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 11:10:08 crc kubenswrapper[4728]: I1205 11:10:08.719521    4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-q4v9s" event={"ID":"985a876e-5d4e-4904-85c6-f10945d269cd","Type":"ContainerStarted","Data":"d3bf42c33dc48e3c9e82216d327a85660122561040e927e2f260ffc588017cab"}
Dec 05 11:10:08 crc kubenswrapper[4728]: I1205 11:10:08.721584    4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-ppfpq" event={"ID":"4b1a43d6-1fb2-48a3-acfe-e656d1dbeff5","Type":"ContainerStarted","Data":"3e58256a8115290a67afa486edcaf94db5e391989feb1a6d6b88a5f4ef530e25"}
Dec 05 11:10:08 crc kubenswrapper[4728]: I1205 11:10:08.722257    4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-lt4gz" event={"ID":"a3b08810-f132-4eae-99fa-5a68c197e52b","Type":"ContainerStarted","Data":"88afed137a7dd7cf134c7c5c6e53f7c83ab430601428a359dc4287f59dceb47d"}
Dec 05 11:10:08 crc kubenswrapper[4728]: I1205 11:10:08.755675    4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-q4v9s" podStartSLOduration=122.755658015 podStartE2EDuration="2m2.755658015s" podCreationTimestamp="2025-12-05 11:08:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:10:08.754606192 +0000 UTC m=+142.896728895" watchObservedRunningTime="2025-12-05 11:10:08.755658015 +0000 UTC m=+142.897780708"
Dec 05 11:10:08 crc kubenswrapper[4728]: I1205 11:10:08.757143    4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
(UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:10:08 crc kubenswrapper[4728]: E1205 11:10:08.757421 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:09.257411209 +0000 UTC m=+143.399533902 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:08 crc kubenswrapper[4728]: I1205 11:10:08.826986 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pq24q" event={"ID":"40a0ccb9-a83b-4836-8864-5687d054d330","Type":"ContainerStarted","Data":"f0874a0af865c7b5724cf5e8e6dd4992628d7c69f5e139761a63e81ca6fd5cfd"} Dec 05 11:10:08 crc kubenswrapper[4728]: I1205 11:10:08.853166 4728 patch_prober.go:28] interesting pod/router-default-5444994796-c6s8n container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 11:10:08 crc kubenswrapper[4728]: [-]has-synced failed: reason withheld Dec 05 11:10:08 crc kubenswrapper[4728]: [+]process-running ok Dec 05 11:10:08 crc kubenswrapper[4728]: healthz check failed Dec 05 11:10:08 crc kubenswrapper[4728]: I1205 11:10:08.853224 4728 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-c6s8n" podUID="0cbb9d62-be02-4410-bdc2-c0179576d8ed" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 11:10:08 crc kubenswrapper[4728]: I1205 11:10:08.854291 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-vllpv" event={"ID":"f4d671a2-6454-4bf6-a099-0c0e15de2f20","Type":"ContainerStarted","Data":"0dc050e23d94d26ed1a6622ab7948fe873fa3b5bc792278344c986374105ed08"} Dec 05 11:10:08 crc kubenswrapper[4728]: I1205 11:10:08.854324 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-vllpv" event={"ID":"f4d671a2-6454-4bf6-a099-0c0e15de2f20","Type":"ContainerStarted","Data":"703378630cc63370344f2f8c5ff0803f23be7122e41c205f905c45fd0b47d542"} Dec 05 11:10:08 crc kubenswrapper[4728]: I1205 11:10:08.860406 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:08 crc kubenswrapper[4728]: E1205 11:10:08.861260 4728 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:09.361248552 +0000 UTC m=+143.503371245 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:08 crc kubenswrapper[4728]: I1205 11:10:08.862783 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5x4xq" event={"ID":"99f17d34-cfff-4706-af23-04fff3d500bd","Type":"ContainerStarted","Data":"c672946fa9887d9404f945d7911eab74241e39ebb175e67449b658089161d467"} Dec 05 11:10:08 crc kubenswrapper[4728]: I1205 11:10:08.865241 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8wd9p" event={"ID":"4c3990f9-726a-43a5-a84b-eea529806652","Type":"ContainerStarted","Data":"4f4b0182cb095bca200fb6b1e3215c47f274637a2b00a375936fd1e0d1655f73"} Dec 05 11:10:08 crc kubenswrapper[4728]: I1205 11:10:08.865919 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8wd9p" Dec 05 11:10:08 crc kubenswrapper[4728]: I1205 11:10:08.866928 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-zrtl6" event={"ID":"43ab850c-2ecf-49b9-b0ff-9f49befb53c3","Type":"ContainerStarted","Data":"46793633786ea3940fee0cc8e20ab536ffe9914c6a37536b138edfc3bc96305b"} Dec 05 11:10:08 crc kubenswrapper[4728]: I1205 11:10:08.867706 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415540-x94v5" event={"ID":"86cfa1e7-7206-404d-bc2d-bb34f50980ef","Type":"ContainerStarted","Data":"7f62f4738cad637e83b88dbde5f14af8a6e151c3512de62003cd67f2627a5eec"} Dec 05 11:10:08 crc kubenswrapper[4728]: I1205 11:10:08.962954 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:10:08 crc kubenswrapper[4728]: I1205 11:10:08.963544 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-vllpv" podStartSLOduration=122.963526048 podStartE2EDuration="2m2.963526048s" podCreationTimestamp="2025-12-05 11:08:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:10:08.962068003 +0000 UTC m=+143.104190716" watchObservedRunningTime="2025-12-05 11:10:08.963526048 +0000 UTC m=+143.105648731" Dec 05 11:10:08 crc kubenswrapper[4728]: E1205 11:10:08.963862 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
Dec 05 11:10:09 crc kubenswrapper[4728]: I1205 11:10:09.006510    4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-pdjw2" event={"ID":"132479f7-af34-420b-821f-34c11e07b06e","Type":"ContainerStarted","Data":"0931b48ca07ea7b555c7638c1c2abb83f414a7e9d997c06d780476e20ebd796b"}
Dec 05 11:10:09 crc kubenswrapper[4728]: I1205 11:10:09.040997    4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8wd9p" podStartSLOduration=123.040977262 podStartE2EDuration="2m3.040977262s" podCreationTimestamp="2025-12-05 11:08:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:10:09.0405611 +0000 UTC m=+143.182683803" watchObservedRunningTime="2025-12-05 11:10:09.040977262 +0000 UTC m=+143.183099955"
Dec 05 11:10:09 crc kubenswrapper[4728]: I1205 11:10:09.041074    4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-kwdb8" event={"ID":"af6fc955-4b84-4c02-a7b0-a3272f9fbf61","Type":"ContainerStarted","Data":"f7120fa478ebac90ae63ffdf26e65972211d1c822336d3d3430d454d9277d362"}
Dec 05 11:10:09 crc kubenswrapper[4728]: I1205 11:10:09.041446    4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5x4xq" podStartSLOduration=123.041441587 podStartE2EDuration="2m3.041441587s" podCreationTimestamp="2025-12-05 11:08:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:10:08.989473363 +0000 UTC m=+143.131596056" watchObservedRunningTime="2025-12-05 11:10:09.041441587 +0000 UTC m=+143.183564280"
Dec 05 11:10:09 crc kubenswrapper[4728]: I1205 11:10:09.049222    4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vh9hw" event={"ID":"26d19364-b381-49b6-bf41-9cfe831484f1","Type":"ContainerStarted","Data":"ffd508ee455fe5440551602903a15db67c8caea62ddbd428dc98622a1d1b96ad"}
Dec 05 11:10:09 crc kubenswrapper[4728]: I1205 11:10:09.060171    4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-c2jd4" event={"ID":"8f6033c2-7f61-434b-a2c1-e58530ab4196","Type":"ContainerStarted","Data":"c25aa2fef36205730c8bd6d8154732e593bc94326844dc9295e31e587b3d913d"}
Dec 05 11:10:09 crc kubenswrapper[4728]: I1205 11:10:09.071783    4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8"
Dec 05 11:10:09 crc kubenswrapper[4728]: I1205 11:10:09.072027    4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-kwdb8" podStartSLOduration=7.072012444 podStartE2EDuration="7.072012444s" podCreationTimestamp="2025-12-05 11:10:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:10:09.071724915 +0000 UTC m=+143.213847608" watchObservedRunningTime="2025-12-05 11:10:09.072012444 +0000 UTC m=+143.214135137"
Dec 05 11:10:09 crc kubenswrapper[4728]: E1205 11:10:09.073103    4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:09.573090297 +0000 UTC m=+143.715212990 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 11:10:09 crc kubenswrapper[4728]: I1205 11:10:09.137963    4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-2clxj" event={"ID":"8c8f3b0b-edad-4a06-8374-2d2d3cd805d8","Type":"ContainerStarted","Data":"9e443aa647c8f076eef94a9880a5d577964ac3e64b1a8f8f257d40b958a35e91"}
Dec 05 11:10:09 crc kubenswrapper[4728]: I1205 11:10:09.172039    4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-jr65f" event={"ID":"e0939ff9-1920-4663-84d4-4cef2e8e3588","Type":"ContainerStarted","Data":"94b529f7dcf35239b0fa9d9632dbcf56a1b92adede3725ae1b8d50401d0acbd6"}
Dec 05 11:10:09 crc kubenswrapper[4728]: I1205 11:10:09.173077    4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 11:10:09 crc kubenswrapper[4728]: E1205 11:10:09.173404    4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:09.673391982 +0000 UTC m=+143.815514675 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 11:10:09 crc kubenswrapper[4728]: I1205 11:10:09.237068    4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hqq2x" event={"ID":"e00af11a-2662-4b44-9140-5c8d3b5f2834","Type":"ContainerStarted","Data":"0f3336cbbea76364a4eff08277b3528434eb37e807c6a6707d70e3eb2d8851fc"}
Dec 05 11:10:09 crc kubenswrapper[4728]: I1205 11:10:09.238365    4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-jr65f" podStartSLOduration=8.238352694 podStartE2EDuration="8.238352694s" podCreationTimestamp="2025-12-05 11:10:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:10:09.237647992 +0000 UTC m=+143.379770685" watchObservedRunningTime="2025-12-05 11:10:09.238352694 +0000 UTC m=+143.380475407"
Dec 05 11:10:09 crc kubenswrapper[4728]: I1205 11:10:09.290618    4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7fhtn" event={"ID":"51182eb7-b5c6-4108-95cb-c7835d473ae1","Type":"ContainerStarted","Data":"5d082ef99e1edce2ea5ba914ed37a5df4592952db753e02fbbdd0bea46b82983"}
Dec 05 11:10:09 crc kubenswrapper[4728]: I1205 11:10:09.291242    4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8"
Dec 05 11:10:09 crc kubenswrapper[4728]: E1205 11:10:09.291540    4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:09.791528284 +0000 UTC m=+143.933650977 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 11:10:09 crc kubenswrapper[4728]: I1205 11:10:09.360751    4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-p75qw" event={"ID":"f1ff90d9-6a24-416a-a94b-64160dc3d1e2","Type":"ContainerStarted","Data":"16fefae135f4cfbc53913ebb33cbe8f6b17d0da674215e197bda386855230e0c"}
Dec 05 11:10:09 crc kubenswrapper[4728]: I1205 11:10:09.392112    4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 11:10:09 crc kubenswrapper[4728]: E1205 11:10:09.392903    4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:09.892889242 +0000 UTC m=+144.035011935 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 11:10:09 crc kubenswrapper[4728]: I1205 11:10:09.406228    4728 generic.go:334] "Generic (PLEG): container finished" podID="6c786bc2-e4d2-4402-a944-d21132d6087b" containerID="f6df98485e1208d9b16d0d493b2ca192778115abdee5b5b4526f7dca89ac90f1" exitCode=0
Dec 05 11:10:09 crc kubenswrapper[4728]: I1205 11:10:09.406933    4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8d796" event={"ID":"6c786bc2-e4d2-4402-a944-d21132d6087b","Type":"ContainerDied","Data":"f6df98485e1208d9b16d0d493b2ca192778115abdee5b5b4526f7dca89ac90f1"}
Dec 05 11:10:09 crc kubenswrapper[4728]: I1205 11:10:09.421687    4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-p75qw" podStartSLOduration=123.421662904 podStartE2EDuration="2m3.421662904s" podCreationTimestamp="2025-12-05 11:08:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:10:09.420078515 +0000 UTC m=+143.562201228" watchObservedRunningTime="2025-12-05 11:10:09.421662904 +0000 UTC m=+143.563785597"
Dec 05 11:10:09 crc kubenswrapper[4728]: I1205 11:10:09.425023    4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-424hr" event={"ID":"1e090228-7348-4896-8ef7-a14137325478","Type":"ContainerStarted","Data":"3f7e13074beb485cd23b72daf21d846fe8b3b9604c71b3039cd95683f851696f"}
event={"ID":"1e090228-7348-4896-8ef7-a14137325478","Type":"ContainerStarted","Data":"3f7e13074beb485cd23b72daf21d846fe8b3b9604c71b3039cd95683f851696f"} Dec 05 11:10:09 crc kubenswrapper[4728]: I1205 11:10:09.425081 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-424hr" event={"ID":"1e090228-7348-4896-8ef7-a14137325478","Type":"ContainerStarted","Data":"676b2fd023b193e1efbef439317064207fec717b2905846c91dd9b7aa15adfcc"} Dec 05 11:10:09 crc kubenswrapper[4728]: I1205 11:10:09.425684 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-424hr" Dec 05 11:10:09 crc kubenswrapper[4728]: I1205 11:10:09.434266 4728 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-424hr container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.31:8443/healthz\": dial tcp 10.217.0.31:8443: connect: connection refused" start-of-body= Dec 05 11:10:09 crc kubenswrapper[4728]: I1205 11:10:09.434351 4728 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-424hr" podUID="1e090228-7348-4896-8ef7-a14137325478" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.31:8443/healthz\": dial tcp 10.217.0.31:8443: connect: connection refused" Dec 05 11:10:09 crc kubenswrapper[4728]: I1205 11:10:09.442871 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7xxhg" event={"ID":"977887c0-1f95-4b49-ac6e-34d90aa8d305","Type":"ContainerStarted","Data":"c0bb6b387058cae05aed29d2da2e62110acc2ef00b92696a6e25d0429d7d06ff"} Dec 05 11:10:09 crc kubenswrapper[4728]: I1205 11:10:09.442952 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7xxhg" event={"ID":"977887c0-1f95-4b49-ac6e-34d90aa8d305","Type":"ContainerStarted","Data":"ab0a9117d6caf18e60b3a78b47f1a04c98f2e9f3cd634f1cdec9b3e7ea5340a7"} Dec 05 11:10:09 crc kubenswrapper[4728]: I1205 11:10:09.444312 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7xxhg" Dec 05 11:10:09 crc kubenswrapper[4728]: I1205 11:10:09.462776 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nvfz5" event={"ID":"ac5d7238-f656-4446-9620-49a1ea4f677f","Type":"ContainerStarted","Data":"0cedf200840afe37391d36fffed5ba27e40aab5c61073bcf090b4992a18ab7d2"} Dec 05 11:10:09 crc kubenswrapper[4728]: I1205 11:10:09.480102 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-f7jqh" event={"ID":"1ce2658a-0a84-40c6-8e42-e83736811aa1","Type":"ContainerStarted","Data":"f1661302a6b671c78bcfd38162aa1ac1f80c494a625b95902289ca9ea1d45301"} Dec 05 11:10:09 crc kubenswrapper[4728]: I1205 11:10:09.481704 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7xxhg" Dec 05 11:10:09 crc kubenswrapper[4728]: I1205 11:10:09.482019 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-4r2zd" 
event={"ID":"709fe194-7202-4841-a79c-1bd440f108d2","Type":"ContainerStarted","Data":"0f7cc317222f1700aaaeec6c79e55e18282fe3265741bea51adb08b435d19832"} Dec 05 11:10:09 crc kubenswrapper[4728]: I1205 11:10:09.493746 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-gr77k" event={"ID":"bf109eee-6f22-421a-bcca-c9eda9726830","Type":"ContainerStarted","Data":"dcff32357f48fa047e5bfa9ef8280f865ac24944b3950b004af93fce43fd7a62"} Dec 05 11:10:09 crc kubenswrapper[4728]: I1205 11:10:09.495723 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:09 crc kubenswrapper[4728]: E1205 11:10:09.497066 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:09.997054665 +0000 UTC m=+144.139177358 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:09 crc kubenswrapper[4728]: I1205 11:10:09.499983 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-8z7kn" event={"ID":"282b6a91-2678-4bc8-8577-a5fdcc7e2f9b","Type":"ContainerStarted","Data":"a4782b4a10ed79f65939acde32e3482ee2fa5a985ce8d005ba3a11b9fd33449e"} Dec 05 11:10:09 crc kubenswrapper[4728]: I1205 11:10:09.509678 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-76cfh" event={"ID":"7f4d6517-ec6f-44c7-bd21-b74438d2b456","Type":"ContainerStarted","Data":"aa191ed298e6012551dddcc90b7624e4b6a45635d917f076bd5aacefa6631808"} Dec 05 11:10:09 crc kubenswrapper[4728]: I1205 11:10:09.511904 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-g6zpg" event={"ID":"8d5bba3b-8ec6-4325-80d2-0d3c24b10987","Type":"ContainerStarted","Data":"2c1ca30651cd07bc38be176e7fa528819cab035267193acc8cdb046c9c0788e6"} Dec 05 11:10:09 crc kubenswrapper[4728]: I1205 11:10:09.513080 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-gd7w5" event={"ID":"c63f5e7e-bf41-4bea-bfdb-fca1914f4a5d","Type":"ContainerStarted","Data":"6cfd283098d8fcd3892d3906907d8e41aabc0c3d5c7021f2af88c0db435177b3"} Dec 05 11:10:09 crc kubenswrapper[4728]: I1205 11:10:09.518494 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" event={"ID":"d92fe6c3-10f5-4151-86cb-236a4c79463b","Type":"ContainerStarted","Data":"549d7e8a35ce1e67fd7d353cd12d1d38a2a6396fe71f2ef823d85afbf55ee90f"} Dec 05 11:10:09 crc kubenswrapper[4728]: I1205 11:10:09.519257 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" Dec 05 11:10:09 crc kubenswrapper[4728]: I1205 11:10:09.522447 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-xszbp" event={"ID":"ad2b0cc2-0295-417f-8122-0db0f8f71400","Type":"ContainerStarted","Data":"703bcb8733ba61e909dfb804e43fe858194bab0174476700086be5366146edf8"} Dec 05 11:10:09 crc kubenswrapper[4728]: I1205 11:10:09.522511 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-xszbp" event={"ID":"ad2b0cc2-0295-417f-8122-0db0f8f71400","Type":"ContainerStarted","Data":"cb178ff6b24465e87c00f71532c1013f8b171169265acf779bf688991bbc97b6"} Dec 05 11:10:09 crc kubenswrapper[4728]: I1205 11:10:09.529213 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vrvkz" event={"ID":"e4952f54-bca0-4cae-b32a-f9d8cb0bb91c","Type":"ContainerStarted","Data":"252b9f90d86ef368f2f18801d29855ff75c46756b82412c03e6534a2c7bfcae7"} Dec 05 11:10:09 crc kubenswrapper[4728]: I1205 11:10:09.529990 4728 patch_prober.go:28] interesting pod/downloads-7954f5f757-lgb9x container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Dec 05 11:10:09 crc kubenswrapper[4728]: I1205 11:10:09.530045 4728 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-lgb9x" podUID="b21b7f08-1b74-4ed3-8a78-f6a03b514069" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Dec 05 11:10:09 crc kubenswrapper[4728]: I1205 11:10:09.547804 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-52pf7" Dec 05 11:10:09 crc kubenswrapper[4728]: I1205 11:10:09.597526 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:10:09 crc kubenswrapper[4728]: E1205 11:10:09.599668 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:10.099649101 +0000 UTC m=+144.241771794 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:09 crc kubenswrapper[4728]: I1205 11:10:09.685372 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" Dec 05 11:10:09 crc kubenswrapper[4728]: I1205 11:10:09.695711 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-424hr" podStartSLOduration=123.695695285 podStartE2EDuration="2m3.695695285s" podCreationTimestamp="2025-12-05 11:08:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:10:09.695100437 +0000 UTC m=+143.837223140" watchObservedRunningTime="2025-12-05 11:10:09.695695285 +0000 UTC m=+143.837817978" Dec 05 11:10:09 crc kubenswrapper[4728]: I1205 11:10:09.699355 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:09 crc kubenswrapper[4728]: E1205 11:10:09.699672 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:10.199661517 +0000 UTC m=+144.341784210 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:09 crc kubenswrapper[4728]: I1205 11:10:09.806323 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:10:09 crc kubenswrapper[4728]: E1205 11:10:09.806730 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:10.306714169 +0000 UTC m=+144.448836862 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:09 crc kubenswrapper[4728]: I1205 11:10:09.840806 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7xxhg" podStartSLOduration=123.840775163 podStartE2EDuration="2m3.840775163s" podCreationTimestamp="2025-12-05 11:08:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:10:09.839384981 +0000 UTC m=+143.981507664" watchObservedRunningTime="2025-12-05 11:10:09.840775163 +0000 UTC m=+143.982897856" Dec 05 11:10:09 crc kubenswrapper[4728]: I1205 11:10:09.841252 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-f7jqh" podStartSLOduration=123.841246718 podStartE2EDuration="2m3.841246718s" podCreationTimestamp="2025-12-05 11:08:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:10:09.795892788 +0000 UTC m=+143.938015501" watchObservedRunningTime="2025-12-05 11:10:09.841246718 +0000 UTC m=+143.983369411" Dec 05 11:10:09 crc kubenswrapper[4728]: I1205 11:10:09.853775 4728 patch_prober.go:28] interesting pod/router-default-5444994796-c6s8n container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 11:10:09 crc kubenswrapper[4728]: [-]has-synced failed: reason withheld Dec 05 11:10:09 crc kubenswrapper[4728]: [+]process-running ok Dec 05 11:10:09 crc kubenswrapper[4728]: healthz check failed Dec 05 11:10:09 crc kubenswrapper[4728]: I1205 11:10:09.853835 4728 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-c6s8n" podUID="0cbb9d62-be02-4410-bdc2-c0179576d8ed" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 11:10:09 crc kubenswrapper[4728]: I1205 11:10:09.908686 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:09 crc kubenswrapper[4728]: E1205 11:10:09.909044 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:10.409033686 +0000 UTC m=+144.551156379 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:09 crc kubenswrapper[4728]: I1205 11:10:09.965494 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" podStartSLOduration=124.965475477 podStartE2EDuration="2m4.965475477s" podCreationTimestamp="2025-12-05 11:08:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:10:09.917166716 +0000 UTC m=+144.059289409" watchObservedRunningTime="2025-12-05 11:10:09.965475477 +0000 UTC m=+144.107598170" Dec 05 11:10:10 crc kubenswrapper[4728]: I1205 11:10:10.013848 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vrvkz" podStartSLOduration=125.013826119 podStartE2EDuration="2m5.013826119s" podCreationTimestamp="2025-12-05 11:08:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:10:09.965831758 +0000 UTC m=+144.107954441" watchObservedRunningTime="2025-12-05 11:10:10.013826119 +0000 UTC m=+144.155948822" Dec 05 11:10:10 crc kubenswrapper[4728]: I1205 11:10:10.098216 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:10:10 crc kubenswrapper[4728]: E1205 11:10:10.098603 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:10.598588788 +0000 UTC m=+144.740711481 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:10 crc kubenswrapper[4728]: I1205 11:10:10.101874 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-xszbp" podStartSLOduration=124.101857378 podStartE2EDuration="2m4.101857378s" podCreationTimestamp="2025-12-05 11:08:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:10:10.016391498 +0000 UTC m=+144.158514191" watchObservedRunningTime="2025-12-05 11:10:10.101857378 +0000 UTC m=+144.243980091" Dec 05 11:10:10 crc kubenswrapper[4728]: I1205 11:10:10.201219 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:10 crc kubenswrapper[4728]: E1205 11:10:10.201840 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:10.701830103 +0000 UTC m=+144.843952796 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:10 crc kubenswrapper[4728]: I1205 11:10:10.213271 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nvfz5" podStartSLOduration=124.213246463 podStartE2EDuration="2m4.213246463s" podCreationTimestamp="2025-12-05 11:08:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:10:10.103673704 +0000 UTC m=+144.245796407" watchObservedRunningTime="2025-12-05 11:10:10.213246463 +0000 UTC m=+144.355369176" Dec 05 11:10:10 crc kubenswrapper[4728]: I1205 11:10:10.213838 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-8z7kn" podStartSLOduration=124.213833011 podStartE2EDuration="2m4.213833011s" podCreationTimestamp="2025-12-05 11:08:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:10:10.20044018 +0000 UTC m=+144.342562873" watchObservedRunningTime="2025-12-05 11:10:10.213833011 +0000 UTC m=+144.355955704" Dec 05 11:10:10 crc kubenswrapper[4728]: I1205 11:10:10.302894 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:10:10 crc kubenswrapper[4728]: E1205 11:10:10.303213 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:10.803197851 +0000 UTC m=+144.945320544 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:10 crc kubenswrapper[4728]: I1205 11:10:10.404198 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:10 crc kubenswrapper[4728]: E1205 11:10:10.404703 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:10.904684222 +0000 UTC m=+145.046807075 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:10 crc kubenswrapper[4728]: I1205 11:10:10.505756 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:10:10 crc kubenswrapper[4728]: E1205 11:10:10.506222 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:11.006202945 +0000 UTC m=+145.148325638 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:10 crc kubenswrapper[4728]: I1205 11:10:10.506262 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:10 crc kubenswrapper[4728]: E1205 11:10:10.506608 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:11.006598457 +0000 UTC m=+145.148721150 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:10 crc kubenswrapper[4728]: I1205 11:10:10.543363 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vh9hw" event={"ID":"26d19364-b381-49b6-bf41-9cfe831484f1","Type":"ContainerStarted","Data":"54ea29e497881526ded4e8b927714ef8476ec77dd004dcffdac679f7893ecdd0"} Dec 05 11:10:10 crc kubenswrapper[4728]: I1205 11:10:10.545108 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vh9hw" Dec 05 11:10:10 crc kubenswrapper[4728]: I1205 11:10:10.562121 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-76cfh" event={"ID":"7f4d6517-ec6f-44c7-bd21-b74438d2b456","Type":"ContainerStarted","Data":"7d5845a69d0f3c3c890ac0cfe27402e614fa97a1e9ec1998bd4c25144fec225d"} Dec 05 11:10:10 crc kubenswrapper[4728]: I1205 11:10:10.563218 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-76cfh" Dec 05 11:10:10 crc kubenswrapper[4728]: I1205 11:10:10.568955 4728 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-vh9hw container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.40:5443/healthz\": dial tcp 10.217.0.40:5443: connect: connection refused" start-of-body= Dec 05 11:10:10 crc kubenswrapper[4728]: I1205 11:10:10.569015 4728 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vh9hw" podUID="26d19364-b381-49b6-bf41-9cfe831484f1" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.40:5443/healthz\": dial tcp 10.217.0.40:5443: connect: connection 
refused" Dec 05 11:10:10 crc kubenswrapper[4728]: I1205 11:10:10.577119 4728 patch_prober.go:28] interesting pod/console-operator-58897d9998-76cfh container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.32:8443/readyz\": dial tcp 10.217.0.32:8443: connect: connection refused" start-of-body= Dec 05 11:10:10 crc kubenswrapper[4728]: I1205 11:10:10.577185 4728 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-76cfh" podUID="7f4d6517-ec6f-44c7-bd21-b74438d2b456" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.32:8443/readyz\": dial tcp 10.217.0.32:8443: connect: connection refused" Dec 05 11:10:10 crc kubenswrapper[4728]: I1205 11:10:10.588032 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5x4xq" event={"ID":"99f17d34-cfff-4706-af23-04fff3d500bd","Type":"ContainerStarted","Data":"0d0111c8cb00eb9525264fbf69bd93fbd2e635bb6262014bce0290cdad360b50"} Dec 05 11:10:10 crc kubenswrapper[4728]: I1205 11:10:10.598047 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-g6zpg" event={"ID":"8d5bba3b-8ec6-4325-80d2-0d3c24b10987","Type":"ContainerStarted","Data":"93f127c95af0147cb001b05bf5624cae4e212bbd9fd660e25b7fb039fd486d06"} Dec 05 11:10:10 crc kubenswrapper[4728]: I1205 11:10:10.609345 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:10:10 crc kubenswrapper[4728]: E1205 11:10:10.609849 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:11.109831072 +0000 UTC m=+145.251953765 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:10 crc kubenswrapper[4728]: I1205 11:10:10.616963 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415540-x94v5" event={"ID":"86cfa1e7-7206-404d-bc2d-bb34f50980ef","Type":"ContainerStarted","Data":"4484ac0fae5f3a78b23028149b3ccdf7568a08a491dadd0a87640ab2fcfe956a"} Dec 05 11:10:10 crc kubenswrapper[4728]: I1205 11:10:10.644130 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-xszbp" event={"ID":"ad2b0cc2-0295-417f-8122-0db0f8f71400","Type":"ContainerStarted","Data":"9b407c5c2ed5c87f9101c440268000e6cfcd4a9194f78208fea7ebcbb8f2f39d"} Dec 05 11:10:10 crc kubenswrapper[4728]: I1205 11:10:10.645975 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-lt4gz" event={"ID":"a3b08810-f132-4eae-99fa-5a68c197e52b","Type":"ContainerStarted","Data":"7eff5b6cc1917a120e71d8b97e18769b74d007c5403b26309a2109c2c75548ab"} Dec 05 11:10:10 crc kubenswrapper[4728]: I1205 11:10:10.658034 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7fhtn" event={"ID":"51182eb7-b5c6-4108-95cb-c7835d473ae1","Type":"ContainerStarted","Data":"e728733ab58afbb79b2f42b2bd3f856ff3c21f5faec2e9c0ff7217d5b2532c02"} Dec 05 11:10:10 crc kubenswrapper[4728]: I1205 11:10:10.659897 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-gd7w5" event={"ID":"c63f5e7e-bf41-4bea-bfdb-fca1914f4a5d","Type":"ContainerStarted","Data":"c73217b425886f405a2aeeef7dba48549ccc671648ab50dc1a156b9e8a2c35a5"} Dec 05 11:10:10 crc kubenswrapper[4728]: I1205 11:10:10.663714 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-8z7kn" event={"ID":"282b6a91-2678-4bc8-8577-a5fdcc7e2f9b","Type":"ContainerStarted","Data":"eb440999856f0989c3f03775247a53c79e77777c3317d653c0bb02113d04b23b"} Dec 05 11:10:10 crc kubenswrapper[4728]: I1205 11:10:10.684321 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-4r2zd" event={"ID":"709fe194-7202-4841-a79c-1bd440f108d2","Type":"ContainerStarted","Data":"e0f588632a81cf34d786817458e03a77cfb286528d0a48e8c6edb203c6eba8ad"} Dec 05 11:10:10 crc kubenswrapper[4728]: I1205 11:10:10.685345 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vh9hw" podStartSLOduration=124.685326697 podStartE2EDuration="2m4.685326697s" podCreationTimestamp="2025-12-05 11:08:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:10:10.626002488 +0000 UTC m=+144.768125191" watchObservedRunningTime="2025-12-05 11:10:10.685326697 +0000 UTC m=+144.827449390" Dec 05 11:10:10 crc kubenswrapper[4728]: I1205 11:10:10.708474 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hqq2x" event={"ID":"e00af11a-2662-4b44-9140-5c8d3b5f2834","Type":"ContainerStarted","Data":"60a2bcdeedcafd775ac8ef4145526a759dc6d2955ea0aef2e3e7a5b20ebd1c4c"} Dec 05 11:10:10 crc kubenswrapper[4728]: I1205 11:10:10.711397 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:10 crc kubenswrapper[4728]: E1205 11:10:10.712597 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:11.212582762 +0000 UTC m=+145.354705455 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:10 crc kubenswrapper[4728]: I1205 11:10:10.723121 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-kwdb8" event={"ID":"af6fc955-4b84-4c02-a7b0-a3272f9fbf61","Type":"ContainerStarted","Data":"b85b6b1fdadf8b3a2657e7a559859f4a99fd639a5639d7b7ee18a9b43ce6e818"} Dec 05 11:10:10 crc kubenswrapper[4728]: I1205 11:10:10.734202 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pq24q" event={"ID":"40a0ccb9-a83b-4836-8864-5687d054d330","Type":"ContainerStarted","Data":"6d916e6dc3cdaa772e4f779fd42230803355bbc87321e36628ca7a00bc0ff859"} Dec 05 11:10:10 crc kubenswrapper[4728]: I1205 11:10:10.749994 4728 patch_prober.go:28] interesting pod/downloads-7954f5f757-lgb9x container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Dec 05 11:10:10 crc kubenswrapper[4728]: I1205 11:10:10.750092 4728 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-lgb9x" podUID="b21b7f08-1b74-4ed3-8a78-f6a03b514069" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Dec 05 11:10:10 crc kubenswrapper[4728]: I1205 11:10:10.763362 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-424hr" Dec 05 11:10:10 crc kubenswrapper[4728]: I1205 11:10:10.770676 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29415540-x94v5" podStartSLOduration=124.770656733 podStartE2EDuration="2m4.770656733s" podCreationTimestamp="2025-12-05 11:08:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 
11:10:10.768140866 +0000 UTC m=+144.910263579" watchObservedRunningTime="2025-12-05 11:10:10.770656733 +0000 UTC m=+144.912779426" Dec 05 11:10:10 crc kubenswrapper[4728]: I1205 11:10:10.784012 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-76cfh" podStartSLOduration=124.783991022 podStartE2EDuration="2m4.783991022s" podCreationTimestamp="2025-12-05 11:08:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:10:10.68512332 +0000 UTC m=+144.827246033" watchObservedRunningTime="2025-12-05 11:10:10.783991022 +0000 UTC m=+144.926113715" Dec 05 11:10:10 crc kubenswrapper[4728]: I1205 11:10:10.813779 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:10:10 crc kubenswrapper[4728]: E1205 11:10:10.814632 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:11.314617711 +0000 UTC m=+145.456740404 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:10 crc kubenswrapper[4728]: I1205 11:10:10.849874 4728 patch_prober.go:28] interesting pod/router-default-5444994796-c6s8n container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 11:10:10 crc kubenswrapper[4728]: [-]has-synced failed: reason withheld Dec 05 11:10:10 crc kubenswrapper[4728]: [+]process-running ok Dec 05 11:10:10 crc kubenswrapper[4728]: healthz check failed Dec 05 11:10:10 crc kubenswrapper[4728]: I1205 11:10:10.849946 4728 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-c6s8n" podUID="0cbb9d62-be02-4410-bdc2-c0179576d8ed" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 11:10:10 crc kubenswrapper[4728]: I1205 11:10:10.919035 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:10 crc kubenswrapper[4728]: E1205 11:10:10.920764 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-12-05 11:10:11.420750815 +0000 UTC m=+145.562873508 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:10 crc kubenswrapper[4728]: I1205 11:10:10.959537 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pq24q" podStartSLOduration=125.959517553 podStartE2EDuration="2m5.959517553s" podCreationTimestamp="2025-12-05 11:08:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:10:10.886855915 +0000 UTC m=+145.028978608" watchObservedRunningTime="2025-12-05 11:10:10.959517553 +0000 UTC m=+145.101640246" Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.027555 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:10:11 crc kubenswrapper[4728]: E1205 11:10:11.027901 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:11.527874519 +0000 UTC m=+145.669997202 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.028121 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:11 crc kubenswrapper[4728]: E1205 11:10:11.028438 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:11.528430046 +0000 UTC m=+145.670552729 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.132357 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:10:11 crc kubenswrapper[4728]: E1205 11:10:11.132837 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:11.632818035 +0000 UTC m=+145.774940728 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.233899 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:11 crc kubenswrapper[4728]: E1205 11:10:11.234302 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:11.734288156 +0000 UTC m=+145.876410849 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.335318 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:10:11 crc kubenswrapper[4728]: E1205 11:10:11.335646 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:11.835622863 +0000 UTC m=+145.977745546 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.336024 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:11 crc kubenswrapper[4728]: E1205 11:10:11.336396 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:11.836380106 +0000 UTC m=+145.978502809 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.437614 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:10:11 crc kubenswrapper[4728]: E1205 11:10:11.438314 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:11.938293271 +0000 UTC m=+146.080415974 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.475890 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-m9cnn"] Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.477102 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-m9cnn" Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.479164 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.507562 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-m9cnn"] Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.539071 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:11 crc kubenswrapper[4728]: E1205 11:10:11.539452 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:12.039434622 +0000 UTC m=+146.181557325 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.639750 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:10:11 crc kubenswrapper[4728]: E1205 11:10:11.639967 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:12.139938763 +0000 UTC m=+146.282061456 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.640053 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2cw9t\" (UniqueName: \"kubernetes.io/projected/48f4f7f9-a366-44ba-b8ee-f349da78fa76-kube-api-access-2cw9t\") pod \"community-operators-m9cnn\" (UID: \"48f4f7f9-a366-44ba-b8ee-f349da78fa76\") " pod="openshift-marketplace/community-operators-m9cnn" Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.640169 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48f4f7f9-a366-44ba-b8ee-f349da78fa76-utilities\") pod \"community-operators-m9cnn\" (UID: \"48f4f7f9-a366-44ba-b8ee-f349da78fa76\") " pod="openshift-marketplace/community-operators-m9cnn" Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.640212 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48f4f7f9-a366-44ba-b8ee-f349da78fa76-catalog-content\") pod \"community-operators-m9cnn\" (UID: \"48f4f7f9-a366-44ba-b8ee-f349da78fa76\") " pod="openshift-marketplace/community-operators-m9cnn" Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.640261 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:11 crc kubenswrapper[4728]: E1205 11:10:11.640656 4728 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:12.140640365 +0000 UTC m=+146.282763248 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.660742 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-6zdxv"] Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.661926 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6zdxv" Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.664006 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.685948 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6zdxv"] Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.740968 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.741132 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48f4f7f9-a366-44ba-b8ee-f349da78fa76-catalog-content\") pod \"community-operators-m9cnn\" (UID: \"48f4f7f9-a366-44ba-b8ee-f349da78fa76\") " pod="openshift-marketplace/community-operators-m9cnn" Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.741193 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2cw9t\" (UniqueName: \"kubernetes.io/projected/48f4f7f9-a366-44ba-b8ee-f349da78fa76-kube-api-access-2cw9t\") pod \"community-operators-m9cnn\" (UID: \"48f4f7f9-a366-44ba-b8ee-f349da78fa76\") " pod="openshift-marketplace/community-operators-m9cnn" Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.741260 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48f4f7f9-a366-44ba-b8ee-f349da78fa76-utilities\") pod \"community-operators-m9cnn\" (UID: \"48f4f7f9-a366-44ba-b8ee-f349da78fa76\") " pod="openshift-marketplace/community-operators-m9cnn" Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.741874 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48f4f7f9-a366-44ba-b8ee-f349da78fa76-utilities\") pod \"community-operators-m9cnn\" (UID: \"48f4f7f9-a366-44ba-b8ee-f349da78fa76\") " pod="openshift-marketplace/community-operators-m9cnn" Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.742071 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" 
(UniqueName: \"kubernetes.io/empty-dir/48f4f7f9-a366-44ba-b8ee-f349da78fa76-catalog-content\") pod \"community-operators-m9cnn\" (UID: \"48f4f7f9-a366-44ba-b8ee-f349da78fa76\") " pod="openshift-marketplace/community-operators-m9cnn" Dec 05 11:10:11 crc kubenswrapper[4728]: E1205 11:10:11.742125 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:12.242113566 +0000 UTC m=+146.384236259 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.766978 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7fhtn" event={"ID":"51182eb7-b5c6-4108-95cb-c7835d473ae1","Type":"ContainerStarted","Data":"1215c3082dc1410ad0ab17bd338e606a1112f4c190596b121f90798571f66a4e"} Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.767952 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7fhtn" Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.769591 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2cw9t\" (UniqueName: \"kubernetes.io/projected/48f4f7f9-a366-44ba-b8ee-f349da78fa76-kube-api-access-2cw9t\") pod \"community-operators-m9cnn\" (UID: \"48f4f7f9-a366-44ba-b8ee-f349da78fa76\") " pod="openshift-marketplace/community-operators-m9cnn" Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.789438 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-g6zpg" event={"ID":"8d5bba3b-8ec6-4325-80d2-0d3c24b10987","Type":"ContainerStarted","Data":"06ff02fb5caf096f835648709ee897369c39860c4b890e18b3c37544803fd7b7"} Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.794078 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-c2jd4" event={"ID":"8f6033c2-7f61-434b-a2c1-e58530ab4196","Type":"ContainerStarted","Data":"acc12aeaa9293ab3c100a46bca39a18dc4db24370f8639a04e92f556da8988f3"} Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.796038 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-gd7w5" event={"ID":"c63f5e7e-bf41-4bea-bfdb-fca1914f4a5d","Type":"ContainerStarted","Data":"4ce660a408cd5b7692e7f1817b29ad160133282b32d701f1be515d968f894feb"} Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.796573 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-gd7w5" Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.798360 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-pdjw2" 
event={"ID":"132479f7-af34-420b-821f-34c11e07b06e","Type":"ContainerStarted","Data":"af7445c185d1fbd72dacbe0ffb59d0d64dcaafa488f3da043a392401f92355c4"} Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.799677 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-2clxj" event={"ID":"8c8f3b0b-edad-4a06-8374-2d2d3cd805d8","Type":"ContainerStarted","Data":"6eb7a8c4de7b91719bcde13603bacdcace249ece06ce508ae7a9dfc739264d6b"} Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.800329 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-2clxj" Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.801437 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-lt4gz" event={"ID":"a3b08810-f132-4eae-99fa-5a68c197e52b","Type":"ContainerStarted","Data":"594d0f91bde9ab07588b8c927263e013f004211c987b7858a86a4e773c21d47d"} Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.805913 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-m9cnn" Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.810500 4728 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-2clxj container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.36:8080/healthz\": dial tcp 10.217.0.36:8080: connect: connection refused" start-of-body= Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.810553 4728 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-2clxj" podUID="8c8f3b0b-edad-4a06-8374-2d2d3cd805d8" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.36:8080/healthz\": dial tcp 10.217.0.36:8080: connect: connection refused" Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.814480 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-zrtl6" event={"ID":"43ab850c-2ecf-49b9-b0ff-9f49befb53c3","Type":"ContainerStarted","Data":"90b505f492d3983ede9a4f32c5db604206e13589b31974258fc08bbdbbd4d479"} Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.817750 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-4r2zd" event={"ID":"709fe194-7202-4841-a79c-1bd440f108d2","Type":"ContainerStarted","Data":"4b75fa6f90d0e58ad95873683c6db3f3aad5315610e7966dcf8cce79d9be3c0a"} Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.820295 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-ppfpq" event={"ID":"4b1a43d6-1fb2-48a3-acfe-e656d1dbeff5","Type":"ContainerStarted","Data":"426b3a9c6a68ce9ba8deee64c71178e02f4342214324209d51bf10d0bbfd573f"} Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.822450 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7fhtn" podStartSLOduration=125.822433328 podStartE2EDuration="2m5.822433328s" podCreationTimestamp="2025-12-05 11:08:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:10:11.818937211 +0000 UTC m=+145.961059904" watchObservedRunningTime="2025-12-05 11:10:11.822433328 +0000 UTC 
m=+145.964556021" Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.822687 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-gr77k" event={"ID":"bf109eee-6f22-421a-bcca-c9eda9726830","Type":"ContainerStarted","Data":"232037a89e5ecf39900873f0f00c42beb10a84ae47ea6ac3445caad275617fb4"} Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.846136 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a443dace-dc6a-4488-a0d1-183a1198bd0d-utilities\") pod \"certified-operators-6zdxv\" (UID: \"a443dace-dc6a-4488-a0d1-183a1198bd0d\") " pod="openshift-marketplace/certified-operators-6zdxv" Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.846270 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m9f5s\" (UniqueName: \"kubernetes.io/projected/a443dace-dc6a-4488-a0d1-183a1198bd0d-kube-api-access-m9f5s\") pod \"certified-operators-6zdxv\" (UID: \"a443dace-dc6a-4488-a0d1-183a1198bd0d\") " pod="openshift-marketplace/certified-operators-6zdxv" Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.846304 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.846324 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a443dace-dc6a-4488-a0d1-183a1198bd0d-catalog-content\") pod \"certified-operators-6zdxv\" (UID: \"a443dace-dc6a-4488-a0d1-183a1198bd0d\") " pod="openshift-marketplace/certified-operators-6zdxv" Dec 05 11:10:11 crc kubenswrapper[4728]: E1205 11:10:11.846595 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:12.346581599 +0000 UTC m=+146.488704292 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.867034 4728 patch_prober.go:28] interesting pod/router-default-5444994796-c6s8n container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 11:10:11 crc kubenswrapper[4728]: [-]has-synced failed: reason withheld Dec 05 11:10:11 crc kubenswrapper[4728]: [+]process-running ok Dec 05 11:10:11 crc kubenswrapper[4728]: healthz check failed Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.867087 4728 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-c6s8n" podUID="0cbb9d62-be02-4410-bdc2-c0179576d8ed" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.925508 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-4r2zd" podStartSLOduration=125.925490748 podStartE2EDuration="2m5.925490748s" podCreationTimestamp="2025-12-05 11:08:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:10:11.924759075 +0000 UTC m=+146.066881778" watchObservedRunningTime="2025-12-05 11:10:11.925490748 +0000 UTC m=+146.067613441" Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.927579 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-6l2f8"] Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.928418 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6l2f8" Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.929412 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hqq2x" event={"ID":"e00af11a-2662-4b44-9140-5c8d3b5f2834","Type":"ContainerStarted","Data":"79760ddcc0ac03c9e13e73a201f1391b00448b0ae9c67a5e379e4c87be48bde1"} Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.949359 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.949885 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m9f5s\" (UniqueName: \"kubernetes.io/projected/a443dace-dc6a-4488-a0d1-183a1198bd0d-kube-api-access-m9f5s\") pod \"certified-operators-6zdxv\" (UID: \"a443dace-dc6a-4488-a0d1-183a1198bd0d\") " pod="openshift-marketplace/certified-operators-6zdxv" Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.949939 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a443dace-dc6a-4488-a0d1-183a1198bd0d-catalog-content\") pod \"certified-operators-6zdxv\" (UID: \"a443dace-dc6a-4488-a0d1-183a1198bd0d\") " pod="openshift-marketplace/certified-operators-6zdxv" Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.950054 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a443dace-dc6a-4488-a0d1-183a1198bd0d-utilities\") pod \"certified-operators-6zdxv\" (UID: \"a443dace-dc6a-4488-a0d1-183a1198bd0d\") " pod="openshift-marketplace/certified-operators-6zdxv" Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.950414 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a443dace-dc6a-4488-a0d1-183a1198bd0d-utilities\") pod \"certified-operators-6zdxv\" (UID: \"a443dace-dc6a-4488-a0d1-183a1198bd0d\") " pod="openshift-marketplace/certified-operators-6zdxv" Dec 05 11:10:11 crc kubenswrapper[4728]: E1205 11:10:11.950478 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:12.450464514 +0000 UTC m=+146.592587207 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.951805 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8d796" event={"ID":"6c786bc2-e4d2-4402-a944-d21132d6087b","Type":"ContainerStarted","Data":"4018e52ae09242382c4c923d1108fb708d54b193d47305aeb117614440f88998"} Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.974421 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6l2f8"] Dec 05 11:10:11 crc kubenswrapper[4728]: I1205 11:10:11.990935 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a443dace-dc6a-4488-a0d1-183a1198bd0d-catalog-content\") pod \"certified-operators-6zdxv\" (UID: \"a443dace-dc6a-4488-a0d1-183a1198bd0d\") " pod="openshift-marketplace/certified-operators-6zdxv" Dec 05 11:10:12 crc kubenswrapper[4728]: I1205 11:10:12.003286 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-zrtl6" podStartSLOduration=126.003266333 podStartE2EDuration="2m6.003266333s" podCreationTimestamp="2025-12-05 11:08:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:10:11.992560564 +0000 UTC m=+146.134683257" watchObservedRunningTime="2025-12-05 11:10:12.003266333 +0000 UTC m=+146.145389026" Dec 05 11:10:12 crc kubenswrapper[4728]: I1205 11:10:12.010143 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8wd9p" Dec 05 11:10:12 crc kubenswrapper[4728]: I1205 11:10:12.028075 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vh9hw" Dec 05 11:10:12 crc kubenswrapper[4728]: I1205 11:10:12.052975 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-gd7w5" podStartSLOduration=11.052960096 podStartE2EDuration="11.052960096s" podCreationTimestamp="2025-12-05 11:10:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:10:12.052299076 +0000 UTC m=+146.194421769" watchObservedRunningTime="2025-12-05 11:10:12.052960096 +0000 UTC m=+146.195082789" Dec 05 11:10:12 crc kubenswrapper[4728]: I1205 11:10:12.054856 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7489957c-ebb7-4902-9617-9a1287ccccb4-utilities\") pod \"community-operators-6l2f8\" (UID: \"7489957c-ebb7-4902-9617-9a1287ccccb4\") " pod="openshift-marketplace/community-operators-6l2f8" Dec 05 11:10:12 crc kubenswrapper[4728]: I1205 11:10:12.054902 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:12 crc kubenswrapper[4728]: I1205 11:10:12.055060 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7489957c-ebb7-4902-9617-9a1287ccccb4-catalog-content\") pod \"community-operators-6l2f8\" (UID: \"7489957c-ebb7-4902-9617-9a1287ccccb4\") " pod="openshift-marketplace/community-operators-6l2f8" Dec 05 11:10:12 crc kubenswrapper[4728]: I1205 11:10:12.055127 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9s5cm\" (UniqueName: \"kubernetes.io/projected/7489957c-ebb7-4902-9617-9a1287ccccb4-kube-api-access-9s5cm\") pod \"community-operators-6l2f8\" (UID: \"7489957c-ebb7-4902-9617-9a1287ccccb4\") " pod="openshift-marketplace/community-operators-6l2f8" Dec 05 11:10:12 crc kubenswrapper[4728]: E1205 11:10:12.058985 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:12.55897402 +0000 UTC m=+146.701096713 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:12 crc kubenswrapper[4728]: I1205 11:10:12.091866 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m9f5s\" (UniqueName: \"kubernetes.io/projected/a443dace-dc6a-4488-a0d1-183a1198bd0d-kube-api-access-m9f5s\") pod \"certified-operators-6zdxv\" (UID: \"a443dace-dc6a-4488-a0d1-183a1198bd0d\") " pod="openshift-marketplace/certified-operators-6zdxv" Dec 05 11:10:12 crc kubenswrapper[4728]: I1205 11:10:12.156287 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:10:12 crc kubenswrapper[4728]: I1205 11:10:12.156448 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7489957c-ebb7-4902-9617-9a1287ccccb4-utilities\") pod \"community-operators-6l2f8\" (UID: \"7489957c-ebb7-4902-9617-9a1287ccccb4\") " pod="openshift-marketplace/community-operators-6l2f8" Dec 05 11:10:12 crc kubenswrapper[4728]: I1205 11:10:12.156500 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7489957c-ebb7-4902-9617-9a1287ccccb4-catalog-content\") pod \"community-operators-6l2f8\" (UID: \"7489957c-ebb7-4902-9617-9a1287ccccb4\") " pod="openshift-marketplace/community-operators-6l2f8" Dec 05 11:10:12 crc kubenswrapper[4728]: I1205 11:10:12.156530 
4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9s5cm\" (UniqueName: \"kubernetes.io/projected/7489957c-ebb7-4902-9617-9a1287ccccb4-kube-api-access-9s5cm\") pod \"community-operators-6l2f8\" (UID: \"7489957c-ebb7-4902-9617-9a1287ccccb4\") " pod="openshift-marketplace/community-operators-6l2f8" Dec 05 11:10:12 crc kubenswrapper[4728]: E1205 11:10:12.156916 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:12.656902683 +0000 UTC m=+146.799025376 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:12 crc kubenswrapper[4728]: I1205 11:10:12.157246 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7489957c-ebb7-4902-9617-9a1287ccccb4-utilities\") pod \"community-operators-6l2f8\" (UID: \"7489957c-ebb7-4902-9617-9a1287ccccb4\") " pod="openshift-marketplace/community-operators-6l2f8" Dec 05 11:10:12 crc kubenswrapper[4728]: I1205 11:10:12.157458 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7489957c-ebb7-4902-9617-9a1287ccccb4-catalog-content\") pod \"community-operators-6l2f8\" (UID: \"7489957c-ebb7-4902-9617-9a1287ccccb4\") " pod="openshift-marketplace/community-operators-6l2f8" Dec 05 11:10:12 crc kubenswrapper[4728]: I1205 11:10:12.174559 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-g6zpg" podStartSLOduration=126.174540554 podStartE2EDuration="2m6.174540554s" podCreationTimestamp="2025-12-05 11:08:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:10:12.171732248 +0000 UTC m=+146.313854941" watchObservedRunningTime="2025-12-05 11:10:12.174540554 +0000 UTC m=+146.316663247" Dec 05 11:10:12 crc kubenswrapper[4728]: I1205 11:10:12.226453 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-w5msx"] Dec 05 11:10:12 crc kubenswrapper[4728]: I1205 11:10:12.235351 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9s5cm\" (UniqueName: \"kubernetes.io/projected/7489957c-ebb7-4902-9617-9a1287ccccb4-kube-api-access-9s5cm\") pod \"community-operators-6l2f8\" (UID: \"7489957c-ebb7-4902-9617-9a1287ccccb4\") " pod="openshift-marketplace/community-operators-6l2f8" Dec 05 11:10:12 crc kubenswrapper[4728]: I1205 11:10:12.239105 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-w5msx" Dec 05 11:10:12 crc kubenswrapper[4728]: I1205 11:10:12.246693 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-2clxj" podStartSLOduration=126.246671125 podStartE2EDuration="2m6.246671125s" podCreationTimestamp="2025-12-05 11:08:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:10:12.245121618 +0000 UTC m=+146.387244311" watchObservedRunningTime="2025-12-05 11:10:12.246671125 +0000 UTC m=+146.388793828" Dec 05 11:10:12 crc kubenswrapper[4728]: I1205 11:10:12.257326 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:12 crc kubenswrapper[4728]: E1205 11:10:12.257599 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:12.75758798 +0000 UTC m=+146.899710673 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:12 crc kubenswrapper[4728]: I1205 11:10:12.274610 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6zdxv" Dec 05 11:10:12 crc kubenswrapper[4728]: I1205 11:10:12.286489 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-w5msx"] Dec 05 11:10:12 crc kubenswrapper[4728]: I1205 11:10:12.292588 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6l2f8" Dec 05 11:10:12 crc kubenswrapper[4728]: I1205 11:10:12.296321 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-76cfh" Dec 05 11:10:12 crc kubenswrapper[4728]: I1205 11:10:12.349615 4728 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Dec 05 11:10:12 crc kubenswrapper[4728]: I1205 11:10:12.365425 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:10:12 crc kubenswrapper[4728]: E1205 11:10:12.365497 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:12.865484168 +0000 UTC m=+147.007606861 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:12 crc kubenswrapper[4728]: I1205 11:10:12.365628 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fjxz9\" (UniqueName: \"kubernetes.io/projected/10176a9a-24a6-4a05-a9a7-b91062c87c9b-kube-api-access-fjxz9\") pod \"certified-operators-w5msx\" (UID: \"10176a9a-24a6-4a05-a9a7-b91062c87c9b\") " pod="openshift-marketplace/certified-operators-w5msx" Dec 05 11:10:12 crc kubenswrapper[4728]: I1205 11:10:12.365688 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:12 crc kubenswrapper[4728]: I1205 11:10:12.365720 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/10176a9a-24a6-4a05-a9a7-b91062c87c9b-utilities\") pod \"certified-operators-w5msx\" (UID: \"10176a9a-24a6-4a05-a9a7-b91062c87c9b\") " pod="openshift-marketplace/certified-operators-w5msx" Dec 05 11:10:12 crc kubenswrapper[4728]: I1205 11:10:12.365748 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/10176a9a-24a6-4a05-a9a7-b91062c87c9b-catalog-content\") pod \"certified-operators-w5msx\" (UID: \"10176a9a-24a6-4a05-a9a7-b91062c87c9b\") " pod="openshift-marketplace/certified-operators-w5msx" Dec 05 11:10:12 crc kubenswrapper[4728]: I1205 11:10:12.365398 4728 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-pdjw2" podStartSLOduration=127.365379565 podStartE2EDuration="2m7.365379565s" podCreationTimestamp="2025-12-05 11:08:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:10:12.364956352 +0000 UTC m=+146.507079035" watchObservedRunningTime="2025-12-05 11:10:12.365379565 +0000 UTC m=+146.507502268" Dec 05 11:10:12 crc kubenswrapper[4728]: E1205 11:10:12.366512 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 11:10:12.866501549 +0000 UTC m=+147.008624242 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-x8nh8" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:12 crc kubenswrapper[4728]: I1205 11:10:12.452722 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-lt4gz" podStartSLOduration=126.452701562 podStartE2EDuration="2m6.452701562s" podCreationTimestamp="2025-12-05 11:08:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:10:12.408512147 +0000 UTC m=+146.550634860" watchObservedRunningTime="2025-12-05 11:10:12.452701562 +0000 UTC m=+146.594824255" Dec 05 11:10:12 crc kubenswrapper[4728]: I1205 11:10:12.453531 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-c2jd4" podStartSLOduration=126.453524467 podStartE2EDuration="2m6.453524467s" podCreationTimestamp="2025-12-05 11:08:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:10:12.450265657 +0000 UTC m=+146.592388360" watchObservedRunningTime="2025-12-05 11:10:12.453524467 +0000 UTC m=+146.595647170" Dec 05 11:10:12 crc kubenswrapper[4728]: I1205 11:10:12.470215 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:10:12 crc kubenswrapper[4728]: I1205 11:10:12.470539 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/10176a9a-24a6-4a05-a9a7-b91062c87c9b-utilities\") pod \"certified-operators-w5msx\" (UID: \"10176a9a-24a6-4a05-a9a7-b91062c87c9b\") " pod="openshift-marketplace/certified-operators-w5msx" Dec 05 11:10:12 crc kubenswrapper[4728]: I1205 11:10:12.470609 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/10176a9a-24a6-4a05-a9a7-b91062c87c9b-catalog-content\") pod \"certified-operators-w5msx\" (UID: \"10176a9a-24a6-4a05-a9a7-b91062c87c9b\") " pod="openshift-marketplace/certified-operators-w5msx" Dec 05 11:10:12 crc kubenswrapper[4728]: I1205 11:10:12.470679 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fjxz9\" (UniqueName: \"kubernetes.io/projected/10176a9a-24a6-4a05-a9a7-b91062c87c9b-kube-api-access-fjxz9\") pod \"certified-operators-w5msx\" (UID: \"10176a9a-24a6-4a05-a9a7-b91062c87c9b\") " pod="openshift-marketplace/certified-operators-w5msx" Dec 05 11:10:12 crc kubenswrapper[4728]: I1205 11:10:12.470964 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/10176a9a-24a6-4a05-a9a7-b91062c87c9b-utilities\") pod \"certified-operators-w5msx\" (UID: \"10176a9a-24a6-4a05-a9a7-b91062c87c9b\") " pod="openshift-marketplace/certified-operators-w5msx" Dec 05 11:10:12 crc kubenswrapper[4728]: E1205 11:10:12.471040 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 11:10:12.971026004 +0000 UTC m=+147.113148697 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 11:10:12 crc kubenswrapper[4728]: I1205 11:10:12.471232 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/10176a9a-24a6-4a05-a9a7-b91062c87c9b-catalog-content\") pod \"certified-operators-w5msx\" (UID: \"10176a9a-24a6-4a05-a9a7-b91062c87c9b\") " pod="openshift-marketplace/certified-operators-w5msx" Dec 05 11:10:12 crc kubenswrapper[4728]: I1205 11:10:12.508681 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fjxz9\" (UniqueName: \"kubernetes.io/projected/10176a9a-24a6-4a05-a9a7-b91062c87c9b-kube-api-access-fjxz9\") pod \"certified-operators-w5msx\" (UID: \"10176a9a-24a6-4a05-a9a7-b91062c87c9b\") " pod="openshift-marketplace/certified-operators-w5msx" Dec 05 11:10:12 crc kubenswrapper[4728]: I1205 11:10:12.545880 4728 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-12-05T11:10:12.349648202Z","Handler":null,"Name":""} Dec 05 11:10:12 crc kubenswrapper[4728]: I1205 11:10:12.559836 4728 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Dec 05 11:10:12 crc kubenswrapper[4728]: I1205 11:10:12.559874 4728 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Dec 05 11:10:12 crc kubenswrapper[4728]: I1205 11:10:12.561048 4728 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hqq2x" podStartSLOduration=126.561019173 podStartE2EDuration="2m6.561019173s" podCreationTimestamp="2025-12-05 11:08:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:10:12.55407131 +0000 UTC m=+146.696194013" watchObservedRunningTime="2025-12-05 11:10:12.561019173 +0000 UTC m=+146.703141876" Dec 05 11:10:12 crc kubenswrapper[4728]: I1205 11:10:12.572483 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:12 crc kubenswrapper[4728]: I1205 11:10:12.577144 4728 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 05 11:10:12 crc kubenswrapper[4728]: I1205 11:10:12.577179 4728 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:12 crc kubenswrapper[4728]: I1205 11:10:12.593193 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-w5msx" Dec 05 11:10:12 crc kubenswrapper[4728]: I1205 11:10:12.665456 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-gr77k" podStartSLOduration=126.665437324 podStartE2EDuration="2m6.665437324s" podCreationTimestamp="2025-12-05 11:08:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:10:12.654142548 +0000 UTC m=+146.796265251" watchObservedRunningTime="2025-12-05 11:10:12.665437324 +0000 UTC m=+146.807560017" Dec 05 11:10:12 crc kubenswrapper[4728]: I1205 11:10:12.799353 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-m9cnn"] Dec 05 11:10:12 crc kubenswrapper[4728]: I1205 11:10:12.849575 4728 patch_prober.go:28] interesting pod/router-default-5444994796-c6s8n container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 11:10:12 crc kubenswrapper[4728]: [-]has-synced failed: reason withheld Dec 05 11:10:12 crc kubenswrapper[4728]: [+]process-running ok Dec 05 11:10:12 crc kubenswrapper[4728]: healthz check failed Dec 05 11:10:12 crc kubenswrapper[4728]: I1205 11:10:12.849620 4728 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-c6s8n" podUID="0cbb9d62-be02-4410-bdc2-c0179576d8ed" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 11:10:12 crc kubenswrapper[4728]: I1205 11:10:12.887168 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8d796" podStartSLOduration=126.887150452 podStartE2EDuration="2m6.887150452s" podCreationTimestamp="2025-12-05 11:08:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:10:12.845188205 +0000 UTC m=+146.987310898" watchObservedRunningTime="2025-12-05 11:10:12.887150452 +0000 UTC m=+147.029273145" Dec 05 11:10:13 crc kubenswrapper[4728]: I1205 11:10:13.014245 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-ppfpq" event={"ID":"4b1a43d6-1fb2-48a3-acfe-e656d1dbeff5","Type":"ContainerStarted","Data":"276684dc74d8be39030fddec9d77af3e47d3088a9df6f1d40b3647f102734779"} Dec 05 11:10:13 crc kubenswrapper[4728]: I1205 11:10:13.033652 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6zdxv"] Dec 05 11:10:13 crc kubenswrapper[4728]: I1205 11:10:13.036736 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-m9cnn" event={"ID":"48f4f7f9-a366-44ba-b8ee-f349da78fa76","Type":"ContainerStarted","Data":"60e64ed66be09c9b1666cac08a30c4a79375a5e9183edd78e5462e8c08753389"} Dec 05 11:10:13 crc kubenswrapper[4728]: I1205 11:10:13.038624 4728 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-2clxj container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.36:8080/healthz\": dial tcp 10.217.0.36:8080: connect: connection refused" start-of-body= Dec 05 11:10:13 crc kubenswrapper[4728]: I1205 11:10:13.038663 4728 prober.go:107] "Probe failed" probeType="Readiness" 
pod="openshift-marketplace/marketplace-operator-79b997595-2clxj" podUID="8c8f3b0b-edad-4a06-8374-2d2d3cd805d8" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.36:8080/healthz\": dial tcp 10.217.0.36:8080: connect: connection refused" Dec 05 11:10:13 crc kubenswrapper[4728]: I1205 11:10:13.054869 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6l2f8"] Dec 05 11:10:13 crc kubenswrapper[4728]: I1205 11:10:13.150808 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-w5msx"] Dec 05 11:10:13 crc kubenswrapper[4728]: I1205 11:10:13.178089 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-x8nh8\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:13 crc kubenswrapper[4728]: I1205 11:10:13.197151 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 11:10:13 crc kubenswrapper[4728]: I1205 11:10:13.210854 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:13 crc kubenswrapper[4728]: I1205 11:10:13.236111 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 05 11:10:13 crc kubenswrapper[4728]: I1205 11:10:13.672635 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-mjf89"] Dec 05 11:10:13 crc kubenswrapper[4728]: I1205 11:10:13.674078 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mjf89" Dec 05 11:10:13 crc kubenswrapper[4728]: I1205 11:10:13.678018 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 05 11:10:13 crc kubenswrapper[4728]: I1205 11:10:13.685286 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mjf89"] Dec 05 11:10:13 crc kubenswrapper[4728]: I1205 11:10:13.709513 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wv5vf\" (UniqueName: \"kubernetes.io/projected/09c93128-1454-446f-bb75-771442084d74-kube-api-access-wv5vf\") pod \"redhat-marketplace-mjf89\" (UID: \"09c93128-1454-446f-bb75-771442084d74\") " pod="openshift-marketplace/redhat-marketplace-mjf89" Dec 05 11:10:13 crc kubenswrapper[4728]: I1205 11:10:13.709605 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09c93128-1454-446f-bb75-771442084d74-utilities\") pod \"redhat-marketplace-mjf89\" (UID: \"09c93128-1454-446f-bb75-771442084d74\") " pod="openshift-marketplace/redhat-marketplace-mjf89" Dec 05 11:10:13 crc kubenswrapper[4728]: I1205 11:10:13.709625 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09c93128-1454-446f-bb75-771442084d74-catalog-content\") pod \"redhat-marketplace-mjf89\" (UID: \"09c93128-1454-446f-bb75-771442084d74\") " pod="openshift-marketplace/redhat-marketplace-mjf89" Dec 05 11:10:13 crc kubenswrapper[4728]: I1205 11:10:13.727073 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-x8nh8"] Dec 05 11:10:13 crc kubenswrapper[4728]: W1205 11:10:13.738453 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaec2314f_9290_4eb4_a632_70baf826e29a.slice/crio-957e3ba0aa51d3d0bd4be462148604d25bd8feebccea184e06abc84531fafd96 WatchSource:0}: Error finding container 957e3ba0aa51d3d0bd4be462148604d25bd8feebccea184e06abc84531fafd96: Status 404 returned error can't find the container with id 957e3ba0aa51d3d0bd4be462148604d25bd8feebccea184e06abc84531fafd96 Dec 05 11:10:13 crc kubenswrapper[4728]: I1205 11:10:13.811014 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wv5vf\" (UniqueName: \"kubernetes.io/projected/09c93128-1454-446f-bb75-771442084d74-kube-api-access-wv5vf\") pod \"redhat-marketplace-mjf89\" (UID: \"09c93128-1454-446f-bb75-771442084d74\") " pod="openshift-marketplace/redhat-marketplace-mjf89" Dec 05 11:10:13 crc kubenswrapper[4728]: I1205 11:10:13.811162 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09c93128-1454-446f-bb75-771442084d74-utilities\") pod \"redhat-marketplace-mjf89\" (UID: \"09c93128-1454-446f-bb75-771442084d74\") " pod="openshift-marketplace/redhat-marketplace-mjf89" Dec 05 11:10:13 crc kubenswrapper[4728]: I1205 11:10:13.811191 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09c93128-1454-446f-bb75-771442084d74-catalog-content\") pod \"redhat-marketplace-mjf89\" (UID: \"09c93128-1454-446f-bb75-771442084d74\") " 
pod="openshift-marketplace/redhat-marketplace-mjf89" Dec 05 11:10:13 crc kubenswrapper[4728]: I1205 11:10:13.811690 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09c93128-1454-446f-bb75-771442084d74-catalog-content\") pod \"redhat-marketplace-mjf89\" (UID: \"09c93128-1454-446f-bb75-771442084d74\") " pod="openshift-marketplace/redhat-marketplace-mjf89" Dec 05 11:10:13 crc kubenswrapper[4728]: I1205 11:10:13.811756 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09c93128-1454-446f-bb75-771442084d74-utilities\") pod \"redhat-marketplace-mjf89\" (UID: \"09c93128-1454-446f-bb75-771442084d74\") " pod="openshift-marketplace/redhat-marketplace-mjf89" Dec 05 11:10:13 crc kubenswrapper[4728]: I1205 11:10:13.833602 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wv5vf\" (UniqueName: \"kubernetes.io/projected/09c93128-1454-446f-bb75-771442084d74-kube-api-access-wv5vf\") pod \"redhat-marketplace-mjf89\" (UID: \"09c93128-1454-446f-bb75-771442084d74\") " pod="openshift-marketplace/redhat-marketplace-mjf89" Dec 05 11:10:13 crc kubenswrapper[4728]: I1205 11:10:13.840001 4728 patch_prober.go:28] interesting pod/router-default-5444994796-c6s8n container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 11:10:13 crc kubenswrapper[4728]: [-]has-synced failed: reason withheld Dec 05 11:10:13 crc kubenswrapper[4728]: [+]process-running ok Dec 05 11:10:13 crc kubenswrapper[4728]: healthz check failed Dec 05 11:10:13 crc kubenswrapper[4728]: I1205 11:10:13.840063 4728 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-c6s8n" podUID="0cbb9d62-be02-4410-bdc2-c0179576d8ed" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 11:10:13 crc kubenswrapper[4728]: I1205 11:10:13.932308 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-pdjw2" Dec 05 11:10:13 crc kubenswrapper[4728]: I1205 11:10:13.932650 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-pdjw2" Dec 05 11:10:13 crc kubenswrapper[4728]: I1205 11:10:13.939977 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-pdjw2" Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.013488 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.013567 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 
11:10:14.014442 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.018262 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.042226 4728 generic.go:334] "Generic (PLEG): container finished" podID="48f4f7f9-a366-44ba-b8ee-f349da78fa76" containerID="baeda4fa68eaf44b8e42427d43b46f08b26922fa7cbbe09f1c07352b953d8265" exitCode=0 Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.042327 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-m9cnn" event={"ID":"48f4f7f9-a366-44ba-b8ee-f349da78fa76","Type":"ContainerDied","Data":"baeda4fa68eaf44b8e42427d43b46f08b26922fa7cbbe09f1c07352b953d8265"} Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.043834 4728 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.044067 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" event={"ID":"aec2314f-9290-4eb4-a632-70baf826e29a","Type":"ContainerStarted","Data":"7dbc64c72fcc332a07fa4d42de7e10412f24048a50c530e8c7329dbd1ec154ee"} Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.044121 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" event={"ID":"aec2314f-9290-4eb4-a632-70baf826e29a","Type":"ContainerStarted","Data":"957e3ba0aa51d3d0bd4be462148604d25bd8feebccea184e06abc84531fafd96"} Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.044139 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.045250 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mjf89" Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.045990 4728 generic.go:334] "Generic (PLEG): container finished" podID="a443dace-dc6a-4488-a0d1-183a1198bd0d" containerID="18f52d0dff878c463e957c16605c8c5cbb54d7a5515df1dc5f87b7d7b5e8589f" exitCode=0 Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.046048 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6zdxv" event={"ID":"a443dace-dc6a-4488-a0d1-183a1198bd0d","Type":"ContainerDied","Data":"18f52d0dff878c463e957c16605c8c5cbb54d7a5515df1dc5f87b7d7b5e8589f"} Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.046065 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6zdxv" event={"ID":"a443dace-dc6a-4488-a0d1-183a1198bd0d","Type":"ContainerStarted","Data":"ca9d0729086b023564a5ee831852f0e934b24c3c643ea8dacd7e6c47412f78e9"} Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.048471 4728 generic.go:334] "Generic (PLEG): container finished" podID="86cfa1e7-7206-404d-bc2d-bb34f50980ef" containerID="4484ac0fae5f3a78b23028149b3ccdf7568a08a491dadd0a87640ab2fcfe956a" exitCode=0 Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.048549 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415540-x94v5" event={"ID":"86cfa1e7-7206-404d-bc2d-bb34f50980ef","Type":"ContainerDied","Data":"4484ac0fae5f3a78b23028149b3ccdf7568a08a491dadd0a87640ab2fcfe956a"} Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.050305 4728 generic.go:334] "Generic (PLEG): container finished" podID="10176a9a-24a6-4a05-a9a7-b91062c87c9b" containerID="df8211e03f0f3897ce247afeac0664f4f05b741dd33941d6ae4a3559fc75b7d4" exitCode=0 Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.050337 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-w5msx" event={"ID":"10176a9a-24a6-4a05-a9a7-b91062c87c9b","Type":"ContainerDied","Data":"df8211e03f0f3897ce247afeac0664f4f05b741dd33941d6ae4a3559fc75b7d4"} Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.050374 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-w5msx" event={"ID":"10176a9a-24a6-4a05-a9a7-b91062c87c9b","Type":"ContainerStarted","Data":"a79346779735f5f7500941fa415c5d0d811fd19a24336df3982a9c4bf79b263e"} Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.051946 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8d796" Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.052006 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8d796" Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.052763 4728 generic.go:334] "Generic (PLEG): container finished" podID="7489957c-ebb7-4902-9617-9a1287ccccb4" containerID="3a56baf506365daa34cc2c52670bf1078508a32600c41ecc0b9139487e28471a" exitCode=0 Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.052831 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6l2f8" event={"ID":"7489957c-ebb7-4902-9617-9a1287ccccb4","Type":"ContainerDied","Data":"3a56baf506365daa34cc2c52670bf1078508a32600c41ecc0b9139487e28471a"} Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.052847 4728 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openshift-marketplace/community-operators-6l2f8" event={"ID":"7489957c-ebb7-4902-9617-9a1287ccccb4","Type":"ContainerStarted","Data":"1184c4a12a475201716d900423b728132d2d529fa24876427011b4f23bb759c5"} Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.062390 4728 patch_prober.go:28] interesting pod/downloads-7954f5f757-lgb9x container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.062416 4728 patch_prober.go:28] interesting pod/downloads-7954f5f757-lgb9x container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.062444 4728 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-lgb9x" podUID="b21b7f08-1b74-4ed3-8a78-f6a03b514069" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.062471 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-lgb9x" podUID="b21b7f08-1b74-4ed3-8a78-f6a03b514069" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.073943 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-ppfpq" event={"ID":"4b1a43d6-1fb2-48a3-acfe-e656d1dbeff5","Type":"ContainerStarted","Data":"689a5d676d4a7744fa0c5308b7308885d4052a07672949f2e8b2f2847fb6fda9"} Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.074010 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-ppfpq" event={"ID":"4b1a43d6-1fb2-48a3-acfe-e656d1dbeff5","Type":"ContainerStarted","Data":"d3e482ed11a20a04e6c75fd17e1dbc56e47c1ecf857a4d5f924d2c3606978235"} Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.075892 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8d796" Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.076408 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-2clxj" Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.097176 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-pdjw2" Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.099625 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-5wcjm"] Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.100608 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5wcjm" Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.109942 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5wcjm"] Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.114613 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.114825 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.121937 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.122695 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-f6bjc" Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.123385 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-f6bjc" Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.132479 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.142999 4728 patch_prober.go:28] interesting pod/console-f9d7485db-f6bjc container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.24:8443/health\": dial tcp 10.217.0.24:8443: connect: connection refused" start-of-body= Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.143243 4728 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-f6bjc" podUID="fda4dac4-0200-4740-a9c1-c3897809c2c0" containerName="console" probeResult="failure" output="Get \"https://10.217.0.24:8443/health\": dial tcp 10.217.0.24:8443: connect: connection refused" Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.186287 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" podStartSLOduration=128.186265041 podStartE2EDuration="2m8.186265041s" podCreationTimestamp="2025-12-05 11:08:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:10:14.152318 +0000 UTC m=+148.294440713" watchObservedRunningTime="2025-12-05 
11:10:14.186265041 +0000 UTC m=+148.328387744" Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.187913 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-ppfpq" podStartSLOduration=13.187904491 podStartE2EDuration="13.187904491s" podCreationTimestamp="2025-12-05 11:10:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:10:14.184688493 +0000 UTC m=+148.326811196" watchObservedRunningTime="2025-12-05 11:10:14.187904491 +0000 UTC m=+148.330027184" Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.189355 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.321726 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09543444-057d-42b7-a103-6af978f7c627-utilities\") pod \"redhat-marketplace-5wcjm\" (UID: \"09543444-057d-42b7-a103-6af978f7c627\") " pod="openshift-marketplace/redhat-marketplace-5wcjm" Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.322234 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qswlx\" (UniqueName: \"kubernetes.io/projected/09543444-057d-42b7-a103-6af978f7c627-kube-api-access-qswlx\") pod \"redhat-marketplace-5wcjm\" (UID: \"09543444-057d-42b7-a103-6af978f7c627\") " pod="openshift-marketplace/redhat-marketplace-5wcjm" Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.322265 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09543444-057d-42b7-a103-6af978f7c627-catalog-content\") pod \"redhat-marketplace-5wcjm\" (UID: \"09543444-057d-42b7-a103-6af978f7c627\") " pod="openshift-marketplace/redhat-marketplace-5wcjm" Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.359411 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.367446 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.376523 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.429135 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09543444-057d-42b7-a103-6af978f7c627-utilities\") pod \"redhat-marketplace-5wcjm\" (UID: \"09543444-057d-42b7-a103-6af978f7c627\") " pod="openshift-marketplace/redhat-marketplace-5wcjm" Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.429219 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qswlx\" (UniqueName: \"kubernetes.io/projected/09543444-057d-42b7-a103-6af978f7c627-kube-api-access-qswlx\") pod \"redhat-marketplace-5wcjm\" (UID: \"09543444-057d-42b7-a103-6af978f7c627\") " pod="openshift-marketplace/redhat-marketplace-5wcjm" Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.429249 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09543444-057d-42b7-a103-6af978f7c627-catalog-content\") pod \"redhat-marketplace-5wcjm\" (UID: \"09543444-057d-42b7-a103-6af978f7c627\") " pod="openshift-marketplace/redhat-marketplace-5wcjm" Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.429741 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09543444-057d-42b7-a103-6af978f7c627-catalog-content\") pod \"redhat-marketplace-5wcjm\" (UID: \"09543444-057d-42b7-a103-6af978f7c627\") " pod="openshift-marketplace/redhat-marketplace-5wcjm" Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.429909 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09543444-057d-42b7-a103-6af978f7c627-utilities\") pod \"redhat-marketplace-5wcjm\" (UID: \"09543444-057d-42b7-a103-6af978f7c627\") " pod="openshift-marketplace/redhat-marketplace-5wcjm" Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.460848 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qswlx\" (UniqueName: \"kubernetes.io/projected/09543444-057d-42b7-a103-6af978f7c627-kube-api-access-qswlx\") pod \"redhat-marketplace-5wcjm\" (UID: \"09543444-057d-42b7-a103-6af978f7c627\") " pod="openshift-marketplace/redhat-marketplace-5wcjm" Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.469817 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5wcjm" Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.506182 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mjf89"] Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.661231 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-tfbzt"] Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.667904 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-tfbzt" Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.675193 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.683642 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tfbzt"] Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.733665 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29c0e6c0-78ea-4a37-aa57-22af46f50133-utilities\") pod \"redhat-operators-tfbzt\" (UID: \"29c0e6c0-78ea-4a37-aa57-22af46f50133\") " pod="openshift-marketplace/redhat-operators-tfbzt" Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.733820 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rv7z6\" (UniqueName: \"kubernetes.io/projected/29c0e6c0-78ea-4a37-aa57-22af46f50133-kube-api-access-rv7z6\") pod \"redhat-operators-tfbzt\" (UID: \"29c0e6c0-78ea-4a37-aa57-22af46f50133\") " pod="openshift-marketplace/redhat-operators-tfbzt" Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.733902 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29c0e6c0-78ea-4a37-aa57-22af46f50133-catalog-content\") pod \"redhat-operators-tfbzt\" (UID: \"29c0e6c0-78ea-4a37-aa57-22af46f50133\") " pod="openshift-marketplace/redhat-operators-tfbzt" Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.742172 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.743603 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.747949 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.748264 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.758662 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.835985 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d23227da-1d05-48e3-aff6-172498589933-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"d23227da-1d05-48e3-aff6-172498589933\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.837408 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-c6s8n" Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.839520 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rv7z6\" (UniqueName: \"kubernetes.io/projected/29c0e6c0-78ea-4a37-aa57-22af46f50133-kube-api-access-rv7z6\") pod \"redhat-operators-tfbzt\" (UID: \"29c0e6c0-78ea-4a37-aa57-22af46f50133\") " pod="openshift-marketplace/redhat-operators-tfbzt" Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.840683 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29c0e6c0-78ea-4a37-aa57-22af46f50133-catalog-content\") pod \"redhat-operators-tfbzt\" (UID: \"29c0e6c0-78ea-4a37-aa57-22af46f50133\") " pod="openshift-marketplace/redhat-operators-tfbzt" Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.840854 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d23227da-1d05-48e3-aff6-172498589933-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"d23227da-1d05-48e3-aff6-172498589933\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.840919 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29c0e6c0-78ea-4a37-aa57-22af46f50133-utilities\") pod \"redhat-operators-tfbzt\" (UID: \"29c0e6c0-78ea-4a37-aa57-22af46f50133\") " pod="openshift-marketplace/redhat-operators-tfbzt" Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.841518 4728 patch_prober.go:28] interesting pod/router-default-5444994796-c6s8n container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 11:10:14 crc kubenswrapper[4728]: [-]has-synced failed: reason withheld Dec 05 11:10:14 crc kubenswrapper[4728]: [+]process-running ok Dec 05 11:10:14 crc kubenswrapper[4728]: healthz check failed Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.841548 4728 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-c6s8n" 
podUID="0cbb9d62-be02-4410-bdc2-c0179576d8ed" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.844614 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29c0e6c0-78ea-4a37-aa57-22af46f50133-catalog-content\") pod \"redhat-operators-tfbzt\" (UID: \"29c0e6c0-78ea-4a37-aa57-22af46f50133\") " pod="openshift-marketplace/redhat-operators-tfbzt" Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.844648 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29c0e6c0-78ea-4a37-aa57-22af46f50133-utilities\") pod \"redhat-operators-tfbzt\" (UID: \"29c0e6c0-78ea-4a37-aa57-22af46f50133\") " pod="openshift-marketplace/redhat-operators-tfbzt" Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.878044 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rv7z6\" (UniqueName: \"kubernetes.io/projected/29c0e6c0-78ea-4a37-aa57-22af46f50133-kube-api-access-rv7z6\") pod \"redhat-operators-tfbzt\" (UID: \"29c0e6c0-78ea-4a37-aa57-22af46f50133\") " pod="openshift-marketplace/redhat-operators-tfbzt" Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.942026 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d23227da-1d05-48e3-aff6-172498589933-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"d23227da-1d05-48e3-aff6-172498589933\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.942107 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d23227da-1d05-48e3-aff6-172498589933-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"d23227da-1d05-48e3-aff6-172498589933\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.944003 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d23227da-1d05-48e3-aff6-172498589933-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"d23227da-1d05-48e3-aff6-172498589933\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 11:10:14 crc kubenswrapper[4728]: I1205 11:10:14.980375 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d23227da-1d05-48e3-aff6-172498589933-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"d23227da-1d05-48e3-aff6-172498589933\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 11:10:15 crc kubenswrapper[4728]: I1205 11:10:15.026893 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5wcjm"] Dec 05 11:10:15 crc kubenswrapper[4728]: I1205 11:10:15.074097 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tfbzt" Dec 05 11:10:15 crc kubenswrapper[4728]: I1205 11:10:15.093369 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-qrl2q"] Dec 05 11:10:15 crc kubenswrapper[4728]: I1205 11:10:15.094629 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-qrl2q" Dec 05 11:10:15 crc kubenswrapper[4728]: I1205 11:10:15.123030 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-qrl2q"] Dec 05 11:10:15 crc kubenswrapper[4728]: I1205 11:10:15.130505 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 11:10:15 crc kubenswrapper[4728]: I1205 11:10:15.157016 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5wcjm" event={"ID":"09543444-057d-42b7-a103-6af978f7c627","Type":"ContainerStarted","Data":"22ac0437160a1b150b0cf5a48ab1f7c6bf6b12f68c51b49c3f18e201f9a4e63a"} Dec 05 11:10:15 crc kubenswrapper[4728]: I1205 11:10:15.182657 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"737d8f4429704a0e3eb0da16d6d38d53a7260dff66eb8f34ce2782f8d47d250e"} Dec 05 11:10:15 crc kubenswrapper[4728]: I1205 11:10:15.184166 4728 generic.go:334] "Generic (PLEG): container finished" podID="09c93128-1454-446f-bb75-771442084d74" containerID="d82e7148a1c3c0c0994c4f3c53aa47cb425343ae0cb4f9b0324fb28e2c81ad1c" exitCode=0 Dec 05 11:10:15 crc kubenswrapper[4728]: I1205 11:10:15.185173 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mjf89" event={"ID":"09c93128-1454-446f-bb75-771442084d74","Type":"ContainerDied","Data":"d82e7148a1c3c0c0994c4f3c53aa47cb425343ae0cb4f9b0324fb28e2c81ad1c"} Dec 05 11:10:15 crc kubenswrapper[4728]: I1205 11:10:15.185194 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mjf89" event={"ID":"09c93128-1454-446f-bb75-771442084d74","Type":"ContainerStarted","Data":"4b00e1446dc77e862ec76757433e5d47267e5efe9adcedd82f2ff6ef6e9c0264"} Dec 05 11:10:15 crc kubenswrapper[4728]: I1205 11:10:15.193598 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8d796" Dec 05 11:10:15 crc kubenswrapper[4728]: I1205 11:10:15.251869 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23bda114-880c-46d2-ba79-57a723a6b547-utilities\") pod \"redhat-operators-qrl2q\" (UID: \"23bda114-880c-46d2-ba79-57a723a6b547\") " pod="openshift-marketplace/redhat-operators-qrl2q" Dec 05 11:10:15 crc kubenswrapper[4728]: I1205 11:10:15.252679 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c45tr\" (UniqueName: \"kubernetes.io/projected/23bda114-880c-46d2-ba79-57a723a6b547-kube-api-access-c45tr\") pod \"redhat-operators-qrl2q\" (UID: \"23bda114-880c-46d2-ba79-57a723a6b547\") " pod="openshift-marketplace/redhat-operators-qrl2q" Dec 05 11:10:15 crc kubenswrapper[4728]: I1205 11:10:15.252876 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23bda114-880c-46d2-ba79-57a723a6b547-catalog-content\") pod \"redhat-operators-qrl2q\" (UID: \"23bda114-880c-46d2-ba79-57a723a6b547\") " pod="openshift-marketplace/redhat-operators-qrl2q" Dec 05 11:10:15 crc kubenswrapper[4728]: I1205 11:10:15.354045 4728 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23bda114-880c-46d2-ba79-57a723a6b547-catalog-content\") pod \"redhat-operators-qrl2q\" (UID: \"23bda114-880c-46d2-ba79-57a723a6b547\") " pod="openshift-marketplace/redhat-operators-qrl2q" Dec 05 11:10:15 crc kubenswrapper[4728]: I1205 11:10:15.354261 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23bda114-880c-46d2-ba79-57a723a6b547-utilities\") pod \"redhat-operators-qrl2q\" (UID: \"23bda114-880c-46d2-ba79-57a723a6b547\") " pod="openshift-marketplace/redhat-operators-qrl2q" Dec 05 11:10:15 crc kubenswrapper[4728]: I1205 11:10:15.354383 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c45tr\" (UniqueName: \"kubernetes.io/projected/23bda114-880c-46d2-ba79-57a723a6b547-kube-api-access-c45tr\") pod \"redhat-operators-qrl2q\" (UID: \"23bda114-880c-46d2-ba79-57a723a6b547\") " pod="openshift-marketplace/redhat-operators-qrl2q" Dec 05 11:10:15 crc kubenswrapper[4728]: I1205 11:10:15.361742 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23bda114-880c-46d2-ba79-57a723a6b547-catalog-content\") pod \"redhat-operators-qrl2q\" (UID: \"23bda114-880c-46d2-ba79-57a723a6b547\") " pod="openshift-marketplace/redhat-operators-qrl2q" Dec 05 11:10:15 crc kubenswrapper[4728]: I1205 11:10:15.364481 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23bda114-880c-46d2-ba79-57a723a6b547-utilities\") pod \"redhat-operators-qrl2q\" (UID: \"23bda114-880c-46d2-ba79-57a723a6b547\") " pod="openshift-marketplace/redhat-operators-qrl2q" Dec 05 11:10:15 crc kubenswrapper[4728]: I1205 11:10:15.416825 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c45tr\" (UniqueName: \"kubernetes.io/projected/23bda114-880c-46d2-ba79-57a723a6b547-kube-api-access-c45tr\") pod \"redhat-operators-qrl2q\" (UID: \"23bda114-880c-46d2-ba79-57a723a6b547\") " pod="openshift-marketplace/redhat-operators-qrl2q" Dec 05 11:10:15 crc kubenswrapper[4728]: I1205 11:10:15.428377 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qrl2q" Dec 05 11:10:15 crc kubenswrapper[4728]: I1205 11:10:15.510375 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415540-x94v5" Dec 05 11:10:15 crc kubenswrapper[4728]: I1205 11:10:15.570133 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tfbzt"] Dec 05 11:10:15 crc kubenswrapper[4728]: I1205 11:10:15.661823 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/86cfa1e7-7206-404d-bc2d-bb34f50980ef-config-volume\") pod \"86cfa1e7-7206-404d-bc2d-bb34f50980ef\" (UID: \"86cfa1e7-7206-404d-bc2d-bb34f50980ef\") " Dec 05 11:10:15 crc kubenswrapper[4728]: I1205 11:10:15.662250 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g5d5g\" (UniqueName: \"kubernetes.io/projected/86cfa1e7-7206-404d-bc2d-bb34f50980ef-kube-api-access-g5d5g\") pod \"86cfa1e7-7206-404d-bc2d-bb34f50980ef\" (UID: \"86cfa1e7-7206-404d-bc2d-bb34f50980ef\") " Dec 05 11:10:15 crc kubenswrapper[4728]: I1205 11:10:15.662306 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/86cfa1e7-7206-404d-bc2d-bb34f50980ef-secret-volume\") pod \"86cfa1e7-7206-404d-bc2d-bb34f50980ef\" (UID: \"86cfa1e7-7206-404d-bc2d-bb34f50980ef\") " Dec 05 11:10:15 crc kubenswrapper[4728]: I1205 11:10:15.662627 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/86cfa1e7-7206-404d-bc2d-bb34f50980ef-config-volume" (OuterVolumeSpecName: "config-volume") pod "86cfa1e7-7206-404d-bc2d-bb34f50980ef" (UID: "86cfa1e7-7206-404d-bc2d-bb34f50980ef"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:10:15 crc kubenswrapper[4728]: I1205 11:10:15.669648 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/86cfa1e7-7206-404d-bc2d-bb34f50980ef-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "86cfa1e7-7206-404d-bc2d-bb34f50980ef" (UID: "86cfa1e7-7206-404d-bc2d-bb34f50980ef"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:10:15 crc kubenswrapper[4728]: I1205 11:10:15.670111 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/86cfa1e7-7206-404d-bc2d-bb34f50980ef-kube-api-access-g5d5g" (OuterVolumeSpecName: "kube-api-access-g5d5g") pod "86cfa1e7-7206-404d-bc2d-bb34f50980ef" (UID: "86cfa1e7-7206-404d-bc2d-bb34f50980ef"). InnerVolumeSpecName "kube-api-access-g5d5g". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:10:15 crc kubenswrapper[4728]: I1205 11:10:15.691773 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 05 11:10:15 crc kubenswrapper[4728]: I1205 11:10:15.763419 4728 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/86cfa1e7-7206-404d-bc2d-bb34f50980ef-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 11:10:15 crc kubenswrapper[4728]: I1205 11:10:15.763462 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g5d5g\" (UniqueName: \"kubernetes.io/projected/86cfa1e7-7206-404d-bc2d-bb34f50980ef-kube-api-access-g5d5g\") on node \"crc\" DevicePath \"\"" Dec 05 11:10:15 crc kubenswrapper[4728]: I1205 11:10:15.763477 4728 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/86cfa1e7-7206-404d-bc2d-bb34f50980ef-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 11:10:15 crc kubenswrapper[4728]: W1205 11:10:15.784967 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podd23227da_1d05_48e3_aff6_172498589933.slice/crio-6fbd58b0eb87ff94037c4e67cb0066c08f78a7d5a46995e2f580b3892c62606c WatchSource:0}: Error finding container 6fbd58b0eb87ff94037c4e67cb0066c08f78a7d5a46995e2f580b3892c62606c: Status 404 returned error can't find the container with id 6fbd58b0eb87ff94037c4e67cb0066c08f78a7d5a46995e2f580b3892c62606c Dec 05 11:10:15 crc kubenswrapper[4728]: I1205 11:10:15.842488 4728 patch_prober.go:28] interesting pod/router-default-5444994796-c6s8n container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 11:10:15 crc kubenswrapper[4728]: [-]has-synced failed: reason withheld Dec 05 11:10:15 crc kubenswrapper[4728]: [+]process-running ok Dec 05 11:10:15 crc kubenswrapper[4728]: healthz check failed Dec 05 11:10:15 crc kubenswrapper[4728]: I1205 11:10:15.842881 4728 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-c6s8n" podUID="0cbb9d62-be02-4410-bdc2-c0179576d8ed" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 11:10:15 crc kubenswrapper[4728]: I1205 11:10:15.867119 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-qrl2q"] Dec 05 11:10:15 crc kubenswrapper[4728]: W1205 11:10:15.897159 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod23bda114_880c_46d2_ba79_57a723a6b547.slice/crio-4d41df0564d22de12b4695b1d1339fbf1e63ef4d9fe54c515e26afbab888ba20 WatchSource:0}: Error finding container 4d41df0564d22de12b4695b1d1339fbf1e63ef4d9fe54c515e26afbab888ba20: Status 404 returned error can't find the container with id 4d41df0564d22de12b4695b1d1339fbf1e63ef4d9fe54c515e26afbab888ba20 Dec 05 11:10:16 crc kubenswrapper[4728]: I1205 11:10:16.218507 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"1ff9dc69ef60b531cd53a3693dfacea4267c17604659c621a80f98052a9c27a0"} Dec 05 11:10:16 crc kubenswrapper[4728]: I1205 11:10:16.218767 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"1a4881f43b4538bc0c6b4f744096d23ffce8cb43a91bab9138781220552df53f"} Dec 05 11:10:16 crc kubenswrapper[4728]: I1205 11:10:16.238032 4728 generic.go:334] "Generic (PLEG): container finished" podID="23bda114-880c-46d2-ba79-57a723a6b547" containerID="07fde169d36645af2ad129bc615ca4ecc6cb4a40f79a01e4c8b1a3c63fd41486" exitCode=0 Dec 05 11:10:16 crc kubenswrapper[4728]: I1205 11:10:16.238135 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qrl2q" event={"ID":"23bda114-880c-46d2-ba79-57a723a6b547","Type":"ContainerDied","Data":"07fde169d36645af2ad129bc615ca4ecc6cb4a40f79a01e4c8b1a3c63fd41486"} Dec 05 11:10:16 crc kubenswrapper[4728]: I1205 11:10:16.238171 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qrl2q" event={"ID":"23bda114-880c-46d2-ba79-57a723a6b547","Type":"ContainerStarted","Data":"4d41df0564d22de12b4695b1d1339fbf1e63ef4d9fe54c515e26afbab888ba20"} Dec 05 11:10:16 crc kubenswrapper[4728]: I1205 11:10:16.249281 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"0bddae72f479c400ece602ba267ae19e81fc7b26a7d6ffeccd25f1f584304ace"} Dec 05 11:10:16 crc kubenswrapper[4728]: I1205 11:10:16.249325 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"365c16cf48f8522214e169336d9a47a0c23261cc97fb8dcd4812a78f3a0cba49"} Dec 05 11:10:16 crc kubenswrapper[4728]: I1205 11:10:16.249863 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 11:10:16 crc kubenswrapper[4728]: I1205 11:10:16.301171 4728 generic.go:334] "Generic (PLEG): container finished" podID="09543444-057d-42b7-a103-6af978f7c627" containerID="1b866d9eb7cc957a27aa70c6f5f95dad61fcb91ef4390f13e66b30ce4e05b7d1" exitCode=0 Dec 05 11:10:16 crc kubenswrapper[4728]: I1205 11:10:16.301292 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5wcjm" event={"ID":"09543444-057d-42b7-a103-6af978f7c627","Type":"ContainerDied","Data":"1b866d9eb7cc957a27aa70c6f5f95dad61fcb91ef4390f13e66b30ce4e05b7d1"} Dec 05 11:10:16 crc kubenswrapper[4728]: I1205 11:10:16.307623 4728 generic.go:334] "Generic (PLEG): container finished" podID="29c0e6c0-78ea-4a37-aa57-22af46f50133" containerID="ef5b720cd65b8cbfc8aea692fe775d80420c8f2f47ab25a370581a3f771043a4" exitCode=0 Dec 05 11:10:16 crc kubenswrapper[4728]: I1205 11:10:16.307688 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tfbzt" event={"ID":"29c0e6c0-78ea-4a37-aa57-22af46f50133","Type":"ContainerDied","Data":"ef5b720cd65b8cbfc8aea692fe775d80420c8f2f47ab25a370581a3f771043a4"} Dec 05 11:10:16 crc kubenswrapper[4728]: I1205 11:10:16.307713 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tfbzt" event={"ID":"29c0e6c0-78ea-4a37-aa57-22af46f50133","Type":"ContainerStarted","Data":"d85b6c1cda777acb7d234a054a3ebf04e2a683c1c62802ea1144c7f49528158c"} Dec 05 11:10:16 crc kubenswrapper[4728]: I1205 11:10:16.319021 4728 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"259f9b04bd18e9bcb0518b6134f10a839ff903a754b6f205ff7e1f555893f1ed"} Dec 05 11:10:16 crc kubenswrapper[4728]: I1205 11:10:16.332205 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"d23227da-1d05-48e3-aff6-172498589933","Type":"ContainerStarted","Data":"6fbd58b0eb87ff94037c4e67cb0066c08f78a7d5a46995e2f580b3892c62606c"} Dec 05 11:10:16 crc kubenswrapper[4728]: I1205 11:10:16.336734 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415540-x94v5" Dec 05 11:10:16 crc kubenswrapper[4728]: I1205 11:10:16.336758 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415540-x94v5" event={"ID":"86cfa1e7-7206-404d-bc2d-bb34f50980ef","Type":"ContainerDied","Data":"7f62f4738cad637e83b88dbde5f14af8a6e151c3512de62003cd67f2627a5eec"} Dec 05 11:10:16 crc kubenswrapper[4728]: I1205 11:10:16.336783 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7f62f4738cad637e83b88dbde5f14af8a6e151c3512de62003cd67f2627a5eec" Dec 05 11:10:16 crc kubenswrapper[4728]: I1205 11:10:16.620720 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 05 11:10:16 crc kubenswrapper[4728]: E1205 11:10:16.620933 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86cfa1e7-7206-404d-bc2d-bb34f50980ef" containerName="collect-profiles" Dec 05 11:10:16 crc kubenswrapper[4728]: I1205 11:10:16.620945 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="86cfa1e7-7206-404d-bc2d-bb34f50980ef" containerName="collect-profiles" Dec 05 11:10:16 crc kubenswrapper[4728]: I1205 11:10:16.621043 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="86cfa1e7-7206-404d-bc2d-bb34f50980ef" containerName="collect-profiles" Dec 05 11:10:16 crc kubenswrapper[4728]: I1205 11:10:16.621375 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 11:10:16 crc kubenswrapper[4728]: I1205 11:10:16.624462 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 05 11:10:16 crc kubenswrapper[4728]: I1205 11:10:16.624888 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 05 11:10:16 crc kubenswrapper[4728]: I1205 11:10:16.640678 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 05 11:10:16 crc kubenswrapper[4728]: I1205 11:10:16.783603 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/568c017e-4872-4730-98f6-0903bce55968-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"568c017e-4872-4730-98f6-0903bce55968\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 11:10:16 crc kubenswrapper[4728]: I1205 11:10:16.783937 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/568c017e-4872-4730-98f6-0903bce55968-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"568c017e-4872-4730-98f6-0903bce55968\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 11:10:16 crc kubenswrapper[4728]: I1205 11:10:16.905030 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/568c017e-4872-4730-98f6-0903bce55968-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"568c017e-4872-4730-98f6-0903bce55968\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 11:10:16 crc kubenswrapper[4728]: I1205 11:10:16.905106 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/568c017e-4872-4730-98f6-0903bce55968-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"568c017e-4872-4730-98f6-0903bce55968\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 11:10:16 crc kubenswrapper[4728]: I1205 11:10:16.905913 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/568c017e-4872-4730-98f6-0903bce55968-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"568c017e-4872-4730-98f6-0903bce55968\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 11:10:16 crc kubenswrapper[4728]: I1205 11:10:16.925336 4728 patch_prober.go:28] interesting pod/router-default-5444994796-c6s8n container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 11:10:16 crc kubenswrapper[4728]: [-]has-synced failed: reason withheld Dec 05 11:10:16 crc kubenswrapper[4728]: [+]process-running ok Dec 05 11:10:16 crc kubenswrapper[4728]: healthz check failed Dec 05 11:10:16 crc kubenswrapper[4728]: I1205 11:10:16.925442 4728 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-c6s8n" podUID="0cbb9d62-be02-4410-bdc2-c0179576d8ed" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 11:10:16 crc kubenswrapper[4728]: I1205 11:10:16.938616 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access\" (UniqueName: \"kubernetes.io/projected/568c017e-4872-4730-98f6-0903bce55968-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"568c017e-4872-4730-98f6-0903bce55968\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 11:10:16 crc kubenswrapper[4728]: I1205 11:10:16.964203 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 11:10:17 crc kubenswrapper[4728]: I1205 11:10:17.536079 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"d23227da-1d05-48e3-aff6-172498589933","Type":"ContainerStarted","Data":"bf88c742c814e9f7d6a0967bceab6ed680e87bb0e680bed05cfe90dac3498fac"} Dec 05 11:10:17 crc kubenswrapper[4728]: I1205 11:10:17.557610 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=3.557585812 podStartE2EDuration="3.557585812s" podCreationTimestamp="2025-12-05 11:10:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:10:17.555254811 +0000 UTC m=+151.697377504" watchObservedRunningTime="2025-12-05 11:10:17.557585812 +0000 UTC m=+151.699708505" Dec 05 11:10:17 crc kubenswrapper[4728]: I1205 11:10:17.843204 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-c6s8n" Dec 05 11:10:17 crc kubenswrapper[4728]: I1205 11:10:17.850470 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-c6s8n" Dec 05 11:10:18 crc kubenswrapper[4728]: I1205 11:10:18.060146 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 05 11:10:18 crc kubenswrapper[4728]: I1205 11:10:18.558758 4728 generic.go:334] "Generic (PLEG): container finished" podID="d23227da-1d05-48e3-aff6-172498589933" containerID="bf88c742c814e9f7d6a0967bceab6ed680e87bb0e680bed05cfe90dac3498fac" exitCode=0 Dec 05 11:10:18 crc kubenswrapper[4728]: I1205 11:10:18.558944 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"d23227da-1d05-48e3-aff6-172498589933","Type":"ContainerDied","Data":"bf88c742c814e9f7d6a0967bceab6ed680e87bb0e680bed05cfe90dac3498fac"} Dec 05 11:10:18 crc kubenswrapper[4728]: I1205 11:10:18.574246 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"568c017e-4872-4730-98f6-0903bce55968","Type":"ContainerStarted","Data":"de49fd1b7b9263a33b425ef6297608de3ed59c0fc866aed7552d7c3bf1f998e2"} Dec 05 11:10:19 crc kubenswrapper[4728]: I1205 11:10:19.584321 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"568c017e-4872-4730-98f6-0903bce55968","Type":"ContainerStarted","Data":"976d1d8b88084b69a7442fe0e6e79c16214c1675682bd4a1b3071c28cfef3a33"} Dec 05 11:10:19 crc kubenswrapper[4728]: I1205 11:10:19.695919 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-gd7w5" Dec 05 11:10:19 crc kubenswrapper[4728]: I1205 11:10:19.971783 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 05 11:10:20 crc kubenswrapper[4728]: I1205 11:10:20.017373 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d23227da-1d05-48e3-aff6-172498589933-kube-api-access\") pod \"d23227da-1d05-48e3-aff6-172498589933\" (UID: \"d23227da-1d05-48e3-aff6-172498589933\") "
Dec 05 11:10:20 crc kubenswrapper[4728]: I1205 11:10:20.017523 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d23227da-1d05-48e3-aff6-172498589933-kubelet-dir\") pod \"d23227da-1d05-48e3-aff6-172498589933\" (UID: \"d23227da-1d05-48e3-aff6-172498589933\") "
Dec 05 11:10:20 crc kubenswrapper[4728]: I1205 11:10:20.017891 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d23227da-1d05-48e3-aff6-172498589933-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "d23227da-1d05-48e3-aff6-172498589933" (UID: "d23227da-1d05-48e3-aff6-172498589933"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 11:10:20 crc kubenswrapper[4728]: I1205 11:10:20.043092 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d23227da-1d05-48e3-aff6-172498589933-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "d23227da-1d05-48e3-aff6-172498589933" (UID: "d23227da-1d05-48e3-aff6-172498589933"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:10:20 crc kubenswrapper[4728]: I1205 11:10:20.119918 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d23227da-1d05-48e3-aff6-172498589933-kube-api-access\") on node \"crc\" DevicePath \"\""
Dec 05 11:10:20 crc kubenswrapper[4728]: I1205 11:10:20.119971 4728 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d23227da-1d05-48e3-aff6-172498589933-kubelet-dir\") on node \"crc\" DevicePath \"\""
Dec 05 11:10:20 crc kubenswrapper[4728]: I1205 11:10:20.595014 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"d23227da-1d05-48e3-aff6-172498589933","Type":"ContainerDied","Data":"6fbd58b0eb87ff94037c4e67cb0066c08f78a7d5a46995e2f580b3892c62606c"}
Dec 05 11:10:20 crc kubenswrapper[4728]: I1205 11:10:20.595082 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6fbd58b0eb87ff94037c4e67cb0066c08f78a7d5a46995e2f580b3892c62606c"
Dec 05 11:10:20 crc kubenswrapper[4728]: I1205 11:10:20.595080 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 05 11:10:20 crc kubenswrapper[4728]: I1205 11:10:20.610218 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=4.610198691 podStartE2EDuration="4.610198691s" podCreationTimestamp="2025-12-05 11:10:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:10:20.607649133 +0000 UTC m=+154.749771836" watchObservedRunningTime="2025-12-05 11:10:20.610198691 +0000 UTC m=+154.752321384"
Dec 05 11:10:21 crc kubenswrapper[4728]: I1205 11:10:21.609039 4728 generic.go:334] "Generic (PLEG): container finished" podID="568c017e-4872-4730-98f6-0903bce55968" containerID="976d1d8b88084b69a7442fe0e6e79c16214c1675682bd4a1b3071c28cfef3a33" exitCode=0
Dec 05 11:10:21 crc kubenswrapper[4728]: I1205 11:10:21.609109 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"568c017e-4872-4730-98f6-0903bce55968","Type":"ContainerDied","Data":"976d1d8b88084b69a7442fe0e6e79c16214c1675682bd4a1b3071c28cfef3a33"}
Dec 05 11:10:24 crc kubenswrapper[4728]: I1205 11:10:24.060691 4728 patch_prober.go:28] interesting pod/downloads-7954f5f757-lgb9x container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body=
Dec 05 11:10:24 crc kubenswrapper[4728]: I1205 11:10:24.061126 4728 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-lgb9x" podUID="b21b7f08-1b74-4ed3-8a78-f6a03b514069" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused"
Dec 05 11:10:24 crc kubenswrapper[4728]: I1205 11:10:24.060706 4728 patch_prober.go:28] interesting pod/downloads-7954f5f757-lgb9x container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body=
Dec 05 11:10:24 crc kubenswrapper[4728]: I1205 11:10:24.061166 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-lgb9x" podUID="b21b7f08-1b74-4ed3-8a78-f6a03b514069" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused"
Dec 05 11:10:24 crc kubenswrapper[4728]: I1205 11:10:24.130870 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-f6bjc"
Dec 05 11:10:24 crc kubenswrapper[4728]: I1205 11:10:24.135395 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-f6bjc"
Dec 05 11:10:25 crc kubenswrapper[4728]: I1205 11:10:25.701748 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 11:10:25 crc kubenswrapper[4728]: I1205 11:10:25.702133 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 11:10:28 crc kubenswrapper[4728]: I1205 11:10:28.045372 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/99a5c711-5c13-4615-93fc-9fbf02ce54ca-metrics-certs\") pod \"network-metrics-daemon-2dq9w\" (UID: \"99a5c711-5c13-4615-93fc-9fbf02ce54ca\") " pod="openshift-multus/network-metrics-daemon-2dq9w"
Dec 05 11:10:28 crc kubenswrapper[4728]: I1205 11:10:28.065987 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/99a5c711-5c13-4615-93fc-9fbf02ce54ca-metrics-certs\") pod \"network-metrics-daemon-2dq9w\" (UID: \"99a5c711-5c13-4615-93fc-9fbf02ce54ca\") " pod="openshift-multus/network-metrics-daemon-2dq9w"
Dec 05 11:10:28 crc kubenswrapper[4728]: I1205 11:10:28.276289 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2dq9w"
Dec 05 11:10:28 crc kubenswrapper[4728]: I1205 11:10:28.824985 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 05 11:10:28 crc kubenswrapper[4728]: I1205 11:10:28.856314 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/568c017e-4872-4730-98f6-0903bce55968-kubelet-dir\") pod \"568c017e-4872-4730-98f6-0903bce55968\" (UID: \"568c017e-4872-4730-98f6-0903bce55968\") "
Dec 05 11:10:28 crc kubenswrapper[4728]: I1205 11:10:28.856374 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/568c017e-4872-4730-98f6-0903bce55968-kube-api-access\") pod \"568c017e-4872-4730-98f6-0903bce55968\" (UID: \"568c017e-4872-4730-98f6-0903bce55968\") "
Dec 05 11:10:28 crc kubenswrapper[4728]: I1205 11:10:28.856449 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/568c017e-4872-4730-98f6-0903bce55968-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "568c017e-4872-4730-98f6-0903bce55968" (UID: "568c017e-4872-4730-98f6-0903bce55968"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 11:10:28 crc kubenswrapper[4728]: I1205 11:10:28.856691 4728 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/568c017e-4872-4730-98f6-0903bce55968-kubelet-dir\") on node \"crc\" DevicePath \"\""
Dec 05 11:10:28 crc kubenswrapper[4728]: I1205 11:10:28.862871 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/568c017e-4872-4730-98f6-0903bce55968-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "568c017e-4872-4730-98f6-0903bce55968" (UID: "568c017e-4872-4730-98f6-0903bce55968"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:10:28 crc kubenswrapper[4728]: I1205 11:10:28.958420 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/568c017e-4872-4730-98f6-0903bce55968-kube-api-access\") on node \"crc\" DevicePath \"\""
Dec 05 11:10:29 crc kubenswrapper[4728]: I1205 11:10:29.664028 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"568c017e-4872-4730-98f6-0903bce55968","Type":"ContainerDied","Data":"de49fd1b7b9263a33b425ef6297608de3ed59c0fc866aed7552d7c3bf1f998e2"}
Dec 05 11:10:29 crc kubenswrapper[4728]: I1205 11:10:29.664064 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="de49fd1b7b9263a33b425ef6297608de3ed59c0fc866aed7552d7c3bf1f998e2"
Dec 05 11:10:29 crc kubenswrapper[4728]: I1205 11:10:29.664075 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 05 11:10:29 crc kubenswrapper[4728]: I1205 11:10:29.894129 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf"
Dec 05 11:10:33 crc kubenswrapper[4728]: I1205 11:10:33.218397 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8"
Dec 05 11:10:34 crc kubenswrapper[4728]: I1205 11:10:34.067963 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-lgb9x"
Dec 05 11:10:44 crc kubenswrapper[4728]: I1205 11:10:44.941472 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7fhtn"
Dec 05 11:10:52 crc kubenswrapper[4728]: E1205 11:10:52.762569 4728 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18"
Dec 05 11:10:52 crc kubenswrapper[4728]: E1205 11:10:52.763101 4728 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9s5cm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-6l2f8_openshift-marketplace(7489957c-ebb7-4902-9617-9a1287ccccb4): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 05 11:10:52 crc kubenswrapper[4728]: E1205 11:10:52.765048 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-6l2f8" podUID="7489957c-ebb7-4902-9617-9a1287ccccb4"
Dec 05 11:10:54 crc kubenswrapper[4728]: I1205 11:10:54.015511 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"]
Dec 05 11:10:54 crc kubenswrapper[4728]: E1205 11:10:54.016290 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="568c017e-4872-4730-98f6-0903bce55968" containerName="pruner"
Dec 05 11:10:54 crc kubenswrapper[4728]: I1205 11:10:54.016306 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="568c017e-4872-4730-98f6-0903bce55968" containerName="pruner"
Dec 05 11:10:54 crc kubenswrapper[4728]: E1205 11:10:54.016317 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d23227da-1d05-48e3-aff6-172498589933" containerName="pruner"
Dec 05 11:10:54 crc kubenswrapper[4728]: I1205 11:10:54.016347 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="d23227da-1d05-48e3-aff6-172498589933" containerName="pruner"
Dec 05 11:10:54 crc kubenswrapper[4728]: I1205 11:10:54.016489 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="d23227da-1d05-48e3-aff6-172498589933" containerName="pruner"
Dec 05 11:10:54 crc kubenswrapper[4728]: I1205 11:10:54.016506 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="568c017e-4872-4730-98f6-0903bce55968" containerName="pruner"
Dec 05 11:10:54 crc kubenswrapper[4728]: I1205 11:10:54.017285 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 05 11:10:54 crc kubenswrapper[4728]: I1205 11:10:54.022203 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt"
Dec 05 11:10:54 crc kubenswrapper[4728]: I1205 11:10:54.022468 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n"
Dec 05 11:10:54 crc kubenswrapper[4728]: I1205 11:10:54.043484 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"]
Dec 05 11:10:54 crc kubenswrapper[4728]: I1205 11:10:54.075381 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1dc5e673-e410-40d7-b17d-63b09d59d6ee-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"1dc5e673-e410-40d7-b17d-63b09d59d6ee\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 05 11:10:54 crc kubenswrapper[4728]: I1205 11:10:54.076413 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1dc5e673-e410-40d7-b17d-63b09d59d6ee-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"1dc5e673-e410-40d7-b17d-63b09d59d6ee\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 05 11:10:54 crc kubenswrapper[4728]: I1205 11:10:54.178642 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1dc5e673-e410-40d7-b17d-63b09d59d6ee-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"1dc5e673-e410-40d7-b17d-63b09d59d6ee\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 05 11:10:54 crc kubenswrapper[4728]: I1205 11:10:54.178722 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1dc5e673-e410-40d7-b17d-63b09d59d6ee-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"1dc5e673-e410-40d7-b17d-63b09d59d6ee\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 05 11:10:54 crc kubenswrapper[4728]: I1205 11:10:54.178849 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1dc5e673-e410-40d7-b17d-63b09d59d6ee-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"1dc5e673-e410-40d7-b17d-63b09d59d6ee\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 05 11:10:54 crc kubenswrapper[4728]: I1205 11:10:54.204033 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1dc5e673-e410-40d7-b17d-63b09d59d6ee-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"1dc5e673-e410-40d7-b17d-63b09d59d6ee\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 05 11:10:54 crc kubenswrapper[4728]: I1205 11:10:54.356655 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 05 11:10:55 crc kubenswrapper[4728]: I1205 11:10:55.153189 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 11:10:55 crc kubenswrapper[4728]: I1205 11:10:55.701801 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 11:10:55 crc kubenswrapper[4728]: I1205 11:10:55.701869 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 11:10:56 crc kubenswrapper[4728]: E1205 11:10:56.164256 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-6l2f8" podUID="7489957c-ebb7-4902-9617-9a1287ccccb4"
Dec 05 11:10:56 crc kubenswrapper[4728]: E1205 11:10:56.215199 4728 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18"
Dec 05 11:10:56 crc kubenswrapper[4728]: E1205 11:10:56.215540 4728 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2cw9t,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-m9cnn_openshift-marketplace(48f4f7f9-a366-44ba-b8ee-f349da78fa76): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 05 11:10:56 crc kubenswrapper[4728]: E1205 11:10:56.216770 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-m9cnn" podUID="48f4f7f9-a366-44ba-b8ee-f349da78fa76"
Dec 05 11:10:56 crc kubenswrapper[4728]: E1205 11:10:56.233921 4728 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18"
Dec 05 11:10:56 crc kubenswrapper[4728]: E1205 11:10:56.234098 4728 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-fjxz9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-w5msx_openshift-marketplace(10176a9a-24a6-4a05-a9a7-b91062c87c9b): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 05 11:10:56 crc kubenswrapper[4728]: E1205 11:10:56.236452 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-w5msx" podUID="10176a9a-24a6-4a05-a9a7-b91062c87c9b"
Dec 05 11:10:57 crc kubenswrapper[4728]: E1205 11:10:57.499882 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-m9cnn" podUID="48f4f7f9-a366-44ba-b8ee-f349da78fa76"
Dec 05 11:10:57 crc kubenswrapper[4728]: E1205 11:10:57.500030 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-w5msx" podUID="10176a9a-24a6-4a05-a9a7-b91062c87c9b"
Dec 05 11:10:57 crc kubenswrapper[4728]: E1205 11:10:57.570324 4728 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18"
Dec 05 11:10:57 crc kubenswrapper[4728]: E1205 11:10:57.571143 4728 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-wv5vf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-mjf89_openshift-marketplace(09c93128-1454-446f-bb75-771442084d74): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 05 11:10:57 crc kubenswrapper[4728]: E1205 11:10:57.572506 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-mjf89" podUID="09c93128-1454-446f-bb75-771442084d74"
Dec 05 11:10:57 crc kubenswrapper[4728]: E1205 11:10:57.593383 4728 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18"
Dec 05 11:10:57 crc kubenswrapper[4728]: E1205 11:10:57.593564 4728 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-m9f5s,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-6zdxv_openshift-marketplace(a443dace-dc6a-4488-a0d1-183a1198bd0d): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 05 11:10:57 crc kubenswrapper[4728]: E1205 11:10:57.594809 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-6zdxv" podUID="a443dace-dc6a-4488-a0d1-183a1198bd0d"
Dec 05 11:10:57 crc kubenswrapper[4728]: E1205 11:10:57.629246 4728 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18"
Dec 05 11:10:57 crc kubenswrapper[4728]: E1205 11:10:57.629427 4728 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-qswlx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-5wcjm_openshift-marketplace(09543444-057d-42b7-a103-6af978f7c627): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 05 11:10:57 crc kubenswrapper[4728]: E1205 11:10:57.630607 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-5wcjm" podUID="09543444-057d-42b7-a103-6af978f7c627"
Dec 05 11:10:59 crc kubenswrapper[4728]: I1205 11:10:59.010741 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"]
Dec 05 11:10:59 crc kubenswrapper[4728]: I1205 11:10:59.013024 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc"
Dec 05 11:10:59 crc kubenswrapper[4728]: I1205 11:10:59.018442 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"]
Dec 05 11:10:59 crc kubenswrapper[4728]: I1205 11:10:59.148440 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/bc456c39-c3f9-4e33-974b-f0627ac7a228-kube-api-access\") pod \"installer-9-crc\" (UID: \"bc456c39-c3f9-4e33-974b-f0627ac7a228\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 05 11:10:59 crc kubenswrapper[4728]: I1205 11:10:59.148548 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/bc456c39-c3f9-4e33-974b-f0627ac7a228-var-lock\") pod \"installer-9-crc\" (UID: \"bc456c39-c3f9-4e33-974b-f0627ac7a228\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 05 11:10:59 crc kubenswrapper[4728]: I1205 11:10:59.148657 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/bc456c39-c3f9-4e33-974b-f0627ac7a228-kubelet-dir\") pod \"installer-9-crc\" (UID: \"bc456c39-c3f9-4e33-974b-f0627ac7a228\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 05 11:10:59 crc kubenswrapper[4728]: I1205 11:10:59.250177 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/bc456c39-c3f9-4e33-974b-f0627ac7a228-kube-api-access\") pod \"installer-9-crc\" (UID: \"bc456c39-c3f9-4e33-974b-f0627ac7a228\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 05 11:10:59 crc kubenswrapper[4728]: I1205 11:10:59.250254 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/bc456c39-c3f9-4e33-974b-f0627ac7a228-var-lock\") pod \"installer-9-crc\" (UID: \"bc456c39-c3f9-4e33-974b-f0627ac7a228\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 05 11:10:59 crc kubenswrapper[4728]: I1205 11:10:59.250306 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/bc456c39-c3f9-4e33-974b-f0627ac7a228-kubelet-dir\") pod \"installer-9-crc\" (UID: \"bc456c39-c3f9-4e33-974b-f0627ac7a228\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 05 11:10:59 crc kubenswrapper[4728]: I1205 11:10:59.250400 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/bc456c39-c3f9-4e33-974b-f0627ac7a228-kubelet-dir\") pod \"installer-9-crc\" (UID: \"bc456c39-c3f9-4e33-974b-f0627ac7a228\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 05 11:10:59 crc kubenswrapper[4728]: I1205 11:10:59.250456 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/bc456c39-c3f9-4e33-974b-f0627ac7a228-var-lock\") pod \"installer-9-crc\" (UID: \"bc456c39-c3f9-4e33-974b-f0627ac7a228\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 05 11:10:59 crc kubenswrapper[4728]: I1205 11:10:59.269342 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/bc456c39-c3f9-4e33-974b-f0627ac7a228-kube-api-access\") pod \"installer-9-crc\" (UID: \"bc456c39-c3f9-4e33-974b-f0627ac7a228\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 05 11:10:59 crc kubenswrapper[4728]: I1205 11:10:59.339876 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc"
Dec 05 11:11:01 crc kubenswrapper[4728]: E1205 11:11:01.661111 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-6zdxv" podUID="a443dace-dc6a-4488-a0d1-183a1198bd0d"
Dec 05 11:11:01 crc kubenswrapper[4728]: E1205 11:11:01.661151 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-5wcjm" podUID="09543444-057d-42b7-a103-6af978f7c627"
Dec 05 11:11:01 crc kubenswrapper[4728]: E1205 11:11:01.661377 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-mjf89" podUID="09c93128-1454-446f-bb75-771442084d74"
Dec 05 11:11:01 crc kubenswrapper[4728]: E1205 11:11:01.838271 4728 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18"
Dec 05 11:11:01 crc kubenswrapper[4728]: E1205 11:11:01.838850 4728 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-c45tr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-qrl2q_openshift-marketplace(23bda114-880c-46d2-ba79-57a723a6b547): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 05 11:11:01 crc kubenswrapper[4728]: E1205 11:11:01.840038 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-qrl2q" podUID="23bda114-880c-46d2-ba79-57a723a6b547"
Dec 05 11:11:01 crc kubenswrapper[4728]: E1205 11:11:01.859001 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-qrl2q" podUID="23bda114-880c-46d2-ba79-57a723a6b547"
Dec 05 11:11:01 crc kubenswrapper[4728]: E1205 11:11:01.860013 4728 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18"
Dec 05 11:11:01 crc kubenswrapper[4728]: E1205 11:11:01.860137 4728 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rv7z6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-tfbzt_openshift-marketplace(29c0e6c0-78ea-4a37-aa57-22af46f50133): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 05 11:11:01 crc kubenswrapper[4728]: E1205 11:11:01.861300 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-tfbzt" podUID="29c0e6c0-78ea-4a37-aa57-22af46f50133"
Dec 05 11:11:01 crc kubenswrapper[4728]: I1205 11:11:01.925282 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"]
Dec 05 11:11:02 crc kubenswrapper[4728]: I1205 11:11:02.065062 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-2dq9w"]
Dec 05 11:11:02 crc kubenswrapper[4728]: I1205 11:11:02.155112 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"]
Dec 05 11:11:02 crc kubenswrapper[4728]: I1205 11:11:02.862321 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-2dq9w" event={"ID":"99a5c711-5c13-4615-93fc-9fbf02ce54ca","Type":"ContainerStarted","Data":"7bea7646cb3ac4eaab06d8e43a30783ecf4b7a108b5cb9c98dffe84273a33e82"}
Dec 05 11:11:02 crc kubenswrapper[4728]: I1205 11:11:02.862876 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-2dq9w" event={"ID":"99a5c711-5c13-4615-93fc-9fbf02ce54ca","Type":"ContainerStarted","Data":"1fcb9ad132e61d80af43601d26434916ccb9e4177cd001d4a885b9f74da6c3c8"}
Dec 05 11:11:02 crc kubenswrapper[4728]: I1205 11:11:02.865091 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"1dc5e673-e410-40d7-b17d-63b09d59d6ee","Type":"ContainerStarted","Data":"a3fc05a4cdbc4b3e3702320165f0277e20563a11346a9f2335ebd904e0b86e4e"}
Dec 05 11:11:02 crc kubenswrapper[4728]: I1205 11:11:02.865115 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"1dc5e673-e410-40d7-b17d-63b09d59d6ee","Type":"ContainerStarted","Data":"eb7a70d5ad04151c44bb0ab21451da393ea4b0b7bfc80923270f32c2b0c8848a"}
Dec 05 11:11:02 crc kubenswrapper[4728]: I1205 11:11:02.867393 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"bc456c39-c3f9-4e33-974b-f0627ac7a228","Type":"ContainerStarted","Data":"5ee07bc6c12dbbd6aabe75f5bd4c0b4ee18213d042a88e659005105366e15c59"}
Dec 05 11:11:02 crc kubenswrapper[4728]: I1205 11:11:02.867434 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"bc456c39-c3f9-4e33-974b-f0627ac7a228","Type":"ContainerStarted","Data":"2dc4cdcdd64d34d6ee3a539e1f44db34d3672d22512617c5c97e93f3fa862d2c"}
Dec 05 11:11:02 crc kubenswrapper[4728]: E1205 11:11:02.870287 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-tfbzt" podUID="29c0e6c0-78ea-4a37-aa57-22af46f50133"
Dec 05 11:11:02 crc kubenswrapper[4728]: I1205 11:11:02.889696 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-9-crc" podStartSLOduration=8.889661163 podStartE2EDuration="8.889661163s" podCreationTimestamp="2025-12-05 11:10:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:11:02.876127628 +0000 UTC m=+197.018250321" watchObservedRunningTime="2025-12-05 11:11:02.889661163 +0000 UTC m=+197.031783856"
Dec 05 11:11:02 crc kubenswrapper[4728]: I1205 11:11:02.924264 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=4.924225133 podStartE2EDuration="4.924225133s" podCreationTimestamp="2025-12-05 11:10:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:11:02.920169618 +0000 UTC m=+197.062292311" watchObservedRunningTime="2025-12-05 11:11:02.924225133 +0000 UTC m=+197.066347826"
Dec 05 11:11:03 crc kubenswrapper[4728]: I1205 11:11:03.873675 4728 generic.go:334] "Generic (PLEG): container finished" podID="1dc5e673-e410-40d7-b17d-63b09d59d6ee" containerID="a3fc05a4cdbc4b3e3702320165f0277e20563a11346a9f2335ebd904e0b86e4e" exitCode=0
Dec 05 11:11:03 crc kubenswrapper[4728]: I1205 11:11:03.873850 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"1dc5e673-e410-40d7-b17d-63b09d59d6ee","Type":"ContainerDied","Data":"a3fc05a4cdbc4b3e3702320165f0277e20563a11346a9f2335ebd904e0b86e4e"}
Dec 05 11:11:03 crc kubenswrapper[4728]: I1205 11:11:03.876144 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-2dq9w" event={"ID":"99a5c711-5c13-4615-93fc-9fbf02ce54ca","Type":"ContainerStarted","Data":"7e5ea3405a7347a67b38d8ec709e68d51dde4e4cb5cf0b0402f55d165e01df8f"}
Dec 05 11:11:05 crc kubenswrapper[4728]: I1205 11:11:05.101562 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 05 11:11:05 crc kubenswrapper[4728]: I1205 11:11:05.114968 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-2dq9w" podStartSLOduration=179.114943637 podStartE2EDuration="2m59.114943637s" podCreationTimestamp="2025-12-05 11:08:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:11:03.92056208 +0000 UTC m=+198.062684793" watchObservedRunningTime="2025-12-05 11:11:05.114943637 +0000 UTC m=+199.257066350"
Dec 05 11:11:05 crc kubenswrapper[4728]: I1205 11:11:05.153731 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1dc5e673-e410-40d7-b17d-63b09d59d6ee-kube-api-access\") pod \"1dc5e673-e410-40d7-b17d-63b09d59d6ee\" (UID: \"1dc5e673-e410-40d7-b17d-63b09d59d6ee\") "
Dec 05 11:11:05 crc kubenswrapper[4728]: I1205 11:11:05.153835 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1dc5e673-e410-40d7-b17d-63b09d59d6ee-kubelet-dir\") pod \"1dc5e673-e410-40d7-b17d-63b09d59d6ee\" (UID: \"1dc5e673-e410-40d7-b17d-63b09d59d6ee\") "
Dec 05 11:11:05 crc kubenswrapper[4728]: I1205 11:11:05.154111 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1dc5e673-e410-40d7-b17d-63b09d59d6ee-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "1dc5e673-e410-40d7-b17d-63b09d59d6ee" (UID: "1dc5e673-e410-40d7-b17d-63b09d59d6ee"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 11:11:05 crc kubenswrapper[4728]: I1205 11:11:05.154387 4728 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1dc5e673-e410-40d7-b17d-63b09d59d6ee-kubelet-dir\") on node \"crc\" DevicePath \"\""
Dec 05 11:11:05 crc kubenswrapper[4728]: I1205 11:11:05.159406 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1dc5e673-e410-40d7-b17d-63b09d59d6ee-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1dc5e673-e410-40d7-b17d-63b09d59d6ee" (UID: "1dc5e673-e410-40d7-b17d-63b09d59d6ee"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:11:05 crc kubenswrapper[4728]: I1205 11:11:05.255312 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1dc5e673-e410-40d7-b17d-63b09d59d6ee-kube-api-access\") on node \"crc\" DevicePath \"\""
Dec 05 11:11:05 crc kubenswrapper[4728]: I1205 11:11:05.887604 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"1dc5e673-e410-40d7-b17d-63b09d59d6ee","Type":"ContainerDied","Data":"eb7a70d5ad04151c44bb0ab21451da393ea4b0b7bfc80923270f32c2b0c8848a"}
Dec 05 11:11:05 crc kubenswrapper[4728]: I1205 11:11:05.887653 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eb7a70d5ad04151c44bb0ab21451da393ea4b0b7bfc80923270f32c2b0c8848a"
Dec 05 11:11:05 crc kubenswrapper[4728]: I1205 11:11:05.887658 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 05 11:11:10 crc kubenswrapper[4728]: I1205 11:11:10.914619 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-m9cnn" event={"ID":"48f4f7f9-a366-44ba-b8ee-f349da78fa76","Type":"ContainerStarted","Data":"b0362d028b7224e0d8e57e0c5b0d745888deb423e4b720cb72e9902f7a772256"}
Dec 05 11:11:11 crc kubenswrapper[4728]: I1205 11:11:11.921454 4728 generic.go:334] "Generic (PLEG): container finished" podID="48f4f7f9-a366-44ba-b8ee-f349da78fa76" containerID="b0362d028b7224e0d8e57e0c5b0d745888deb423e4b720cb72e9902f7a772256" exitCode=0
Dec 05 11:11:11 crc kubenswrapper[4728]: I1205 11:11:11.921500 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-m9cnn" event={"ID":"48f4f7f9-a366-44ba-b8ee-f349da78fa76","Type":"ContainerDied","Data":"b0362d028b7224e0d8e57e0c5b0d745888deb423e4b720cb72e9902f7a772256"}
Dec 05 11:11:12 crc kubenswrapper[4728]: I1205 11:11:12.930090 4728 generic.go:334] "Generic (PLEG): container finished" podID="7489957c-ebb7-4902-9617-9a1287ccccb4" containerID="3f81b1f9ef481c4e2688116472c1f643245139322e6c8bc3c19f0860f6c052ca" exitCode=0
Dec 05 11:11:12 crc kubenswrapper[4728]: I1205 11:11:12.930134 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6l2f8" event={"ID":"7489957c-ebb7-4902-9617-9a1287ccccb4","Type":"ContainerDied","Data":"3f81b1f9ef481c4e2688116472c1f643245139322e6c8bc3c19f0860f6c052ca"}
Dec 05 11:11:12 crc kubenswrapper[4728]: I1205 11:11:12.934229 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-m9cnn" event={"ID":"48f4f7f9-a366-44ba-b8ee-f349da78fa76","Type":"ContainerStarted","Data":"b095182e2c599320984e84d99d1ea82a90c37eb1c4ae4bd4af432056e5be61c9"}
Dec 05 11:11:12 crc kubenswrapper[4728]: I1205 11:11:12.972219 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-m9cnn" podStartSLOduration=3.60630996 podStartE2EDuration="1m1.972195552s" podCreationTimestamp="2025-12-05 11:10:11 +0000 UTC" firstStartedPulling="2025-12-05 11:10:14.043514705 +0000 UTC m=+148.185637398" lastFinishedPulling="2025-12-05 11:11:12.409400287 +0000 UTC m=+206.551522990" observedRunningTime="2025-12-05 11:11:12.967171288 +0000 UTC m=+207.109293991" watchObservedRunningTime="2025-12-05 11:11:12.972195552 +0000 UTC m=+207.114318255"
Dec 05 11:11:13 crc kubenswrapper[4728]: I1205 11:11:13.594418 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-x9m7l"]
Dec 05 11:11:13 crc kubenswrapper[4728]: I1205 11:11:13.942233 4728 generic.go:334] "Generic (PLEG): container finished" podID="a443dace-dc6a-4488-a0d1-183a1198bd0d" containerID="e921cc3b4ddad9044853732c69baa3a26011871e0ccf81fac8469fa173ac39b7" exitCode=0
Dec 05 11:11:13 crc kubenswrapper[4728]: I1205 11:11:13.942303 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6zdxv" event={"ID":"a443dace-dc6a-4488-a0d1-183a1198bd0d","Type":"ContainerDied","Data":"e921cc3b4ddad9044853732c69baa3a26011871e0ccf81fac8469fa173ac39b7"}
Dec 05 11:11:13 crc kubenswrapper[4728]: I1205 11:11:13.943927 4728 generic.go:334] "Generic (PLEG): container finished" podID="23bda114-880c-46d2-ba79-57a723a6b547" containerID="ca0ea91998ed906477f9a32850e226d57d7dfdf93a02680caef1829f745e7686" exitCode=0
Dec 05 11:11:13 crc kubenswrapper[4728]: I1205 11:11:13.944012 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qrl2q" event={"ID":"23bda114-880c-46d2-ba79-57a723a6b547","Type":"ContainerDied","Data":"ca0ea91998ed906477f9a32850e226d57d7dfdf93a02680caef1829f745e7686"}
Dec 05 11:11:13 crc kubenswrapper[4728]: I1205 11:11:13.946691 4728 generic.go:334] "Generic (PLEG): container finished" podID="10176a9a-24a6-4a05-a9a7-b91062c87c9b" containerID="b742ff329fa3eca81567bf6a5c6837fdbf6070f60afee1e6f5dff53f9ba922c2" exitCode=0
Dec 05 11:11:13 crc kubenswrapper[4728]: I1205 11:11:13.946760 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-w5msx" event={"ID":"10176a9a-24a6-4a05-a9a7-b91062c87c9b","Type":"ContainerDied","Data":"b742ff329fa3eca81567bf6a5c6837fdbf6070f60afee1e6f5dff53f9ba922c2"}
Dec 05 11:11:13 crc kubenswrapper[4728]: I1205 11:11:13.951202 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6l2f8" event={"ID":"7489957c-ebb7-4902-9617-9a1287ccccb4","Type":"ContainerStarted","Data":"aa4a2c2832a98e895c63611ba804b3727ce7131281c9ed8969b1ffdc6a94ad80"}
Dec 05 11:11:14 crc kubenswrapper[4728]: I1205 11:11:14.021217 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-6l2f8" podStartSLOduration=3.708382239 podStartE2EDuration="1m3.021192503s" podCreationTimestamp="2025-12-05 11:10:11 +0000 UTC" firstStartedPulling="2025-12-05 11:10:14.057812523 +0000 UTC m=+148.199935226" lastFinishedPulling="2025-12-05 11:11:13.370622797 +0000 UTC m=+207.512745490" observedRunningTime="2025-12-05 11:11:14.017933963 +0000 UTC m=+208.160056656" watchObservedRunningTime="2025-12-05 11:11:14.021192503 +0000 UTC m=+208.163315196"
Dec 05 11:11:16 crc kubenswrapper[4728]: I1205 11:11:16.984470 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qrl2q" event={"ID":"23bda114-880c-46d2-ba79-57a723a6b547","Type":"ContainerStarted","Data":"c55d2540c68aeaf2690479a320d33f722587a33450b0dcd0ed1f3bcea22e9b9a"}
Dec 05 11:11:16 crc kubenswrapper[4728]: I1205 11:11:16.986665 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-w5msx" event={"ID":"10176a9a-24a6-4a05-a9a7-b91062c87c9b","Type":"ContainerStarted","Data":"15f45339d822353e74e01238eaca2cd8957d5b1e3c471f1c6e8f311307249692"}
Dec 05 11:11:16 crc kubenswrapper[4728]: I1205 11:11:16.989153 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tfbzt" event={"ID":"29c0e6c0-78ea-4a37-aa57-22af46f50133","Type":"ContainerStarted","Data":"35397ac4885f44c63da90dc252050f9b1b8f37e6b4a675d98ed20fa3690ba67e"}
Dec 05 11:11:16 crc kubenswrapper[4728]: I1205 11:11:16.991107 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6zdxv" event={"ID":"a443dace-dc6a-4488-a0d1-183a1198bd0d","Type":"ContainerStarted","Data":"89fe1afe709040bdb3354809a7e708dd765847d79bb13fa93ad41dee5e743088"}
Dec 05 11:11:17 crc kubenswrapper[4728]: I1205 11:11:17.005147 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-qrl2q" podStartSLOduration=2.463128566 podStartE2EDuration="1m2.005126947s" podCreationTimestamp="2025-12-05 11:10:15 +0000 UTC" firstStartedPulling="2025-12-05 11:10:16.243772171 +0000 UTC m=+150.385894864" lastFinishedPulling="2025-12-05 11:11:15.785770552 +0000 UTC m=+209.927893245" observedRunningTime="2025-12-05 11:11:17.004936721 +0000 UTC m=+211.147059434" watchObservedRunningTime="2025-12-05 11:11:17.005126947 +0000 UTC m=+211.147249640"
Dec 05 11:11:17 crc kubenswrapper[4728]: I1205 11:11:17.027550 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-6zdxv" podStartSLOduration=3.617020097 podStartE2EDuration="1m6.027532473s" podCreationTimestamp="2025-12-05 11:10:11 +0000 UTC" firstStartedPulling="2025-12-05 11:10:14.047638261 +0000 UTC m=+148.189760964" lastFinishedPulling="2025-12-05 11:11:16.458150647 +0000 UTC m=+210.600273340" observedRunningTime="2025-12-05 11:11:17.026208253 +0000 UTC m=+211.168330946" watchObservedRunningTime="2025-12-05 11:11:17.027532473 +0000 UTC m=+211.169655166"
Dec 05 11:11:17 crc kubenswrapper[4728]: I1205 11:11:17.062366 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-w5msx" podStartSLOduration=2.929712765 podStartE2EDuration="1m5.062347781s" podCreationTimestamp="2025-12-05 11:10:12 +0000 UTC" firstStartedPulling="2025-12-05 11:10:14.057777222 +0000 UTC m=+148.199899915" lastFinishedPulling="2025-12-05 11:11:16.190412238 +0000 UTC m=+210.332534931" observedRunningTime="2025-12-05 11:11:17.060745962 +0000 UTC m=+211.202868665" watchObservedRunningTime="2025-12-05 11:11:17.062347781 +0000 UTC m=+211.204470484"
Dec 05 11:11:19 crc kubenswrapper[4728]: I1205 11:11:19.007080 4728 generic.go:334] "Generic (PLEG): container finished" podID="29c0e6c0-78ea-4a37-aa57-22af46f50133" containerID="35397ac4885f44c63da90dc252050f9b1b8f37e6b4a675d98ed20fa3690ba67e" exitCode=0
Dec 05 11:11:19 crc kubenswrapper[4728]: I1205 11:11:19.007132 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tfbzt" event={"ID":"29c0e6c0-78ea-4a37-aa57-22af46f50133","Type":"ContainerDied","Data":"35397ac4885f44c63da90dc252050f9b1b8f37e6b4a675d98ed20fa3690ba67e"}
Dec 05 11:11:21 crc kubenswrapper[4728]: I1205 11:11:21.806364 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-m9cnn"
Dec 05 11:11:21 crc kubenswrapper[4728]: I1205 11:11:21.806879 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-m9cnn"
Dec 05 11:11:21 crc kubenswrapper[4728]: I1205 11:11:21.899074 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-m9cnn"
Dec 05 11:11:22 crc kubenswrapper[4728]: I1205 11:11:22.275609 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-6zdxv"
Dec 05 11:11:22 crc kubenswrapper[4728]: I1205 11:11:22.275659 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-6zdxv"
Dec 05 11:11:22 crc kubenswrapper[4728]: I1205 11:11:22.294169 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-6l2f8"
Dec 05 11:11:22 crc kubenswrapper[4728]: I1205 11:11:22.295148 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-6l2f8"
Dec 05 11:11:22 crc kubenswrapper[4728]: I1205 11:11:22.310965 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-6zdxv"
Dec 05 11:11:22 crc kubenswrapper[4728]: I1205 11:11:22.317015 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-m9cnn"
Dec 05 11:11:22 crc kubenswrapper[4728]: I1205 11:11:22.340922 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-6l2f8"
Dec 05 11:11:22 crc kubenswrapper[4728]: I1205 11:11:22.594730 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-w5msx"
Dec 05 11:11:22 crc kubenswrapper[4728]: I1205 11:11:22.595430 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-w5msx"
Dec 05 11:11:22 crc kubenswrapper[4728]: I1205 11:11:22.657127 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-w5msx"
Dec 05 11:11:23 crc kubenswrapper[4728]: I1205 11:11:23.065903 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-6l2f8"
Dec 05 11:11:23 crc kubenswrapper[4728]: I1205 11:11:23.065972 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-w5msx"
Dec 05 11:11:23 crc kubenswrapper[4728]: I1205 11:11:23.075431 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-6zdxv"
Dec 05 11:11:23 crc kubenswrapper[4728]: I1205 11:11:23.941446 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6l2f8"]
Dec 05 11:11:24 crc kubenswrapper[4728]: I1205 11:11:24.540248 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-w5msx"]
Dec 05 11:11:25 crc kubenswrapper[4728]: I1205 11:11:25.035807 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-6l2f8" podUID="7489957c-ebb7-4902-9617-9a1287ccccb4" containerName="registry-server" containerID="cri-o://aa4a2c2832a98e895c63611ba804b3727ce7131281c9ed8969b1ffdc6a94ad80" gracePeriod=2
Dec 05 11:11:25 crc kubenswrapper[4728]: I1205 11:11:25.429264 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-qrl2q"
Dec 05 11:11:25 crc kubenswrapper[4728]: I1205 11:11:25.429328 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-qrl2q"
Dec 05 11:11:25 crc kubenswrapper[4728]: I1205 11:11:25.465455 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-qrl2q"
Dec 05 11:11:25 crc kubenswrapper[4728]: I1205 11:11:25.702169 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 11:11:25 crc kubenswrapper[4728]: I1205 11:11:25.702255 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 11:11:25 crc kubenswrapper[4728]: I1205 11:11:25.702347 4728 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp"
Dec 05 11:11:25 crc kubenswrapper[4728]: I1205 11:11:25.703000 4728 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d41cccfccd3b8763566f9a4453832461a74aedcf7bcc18e8c925bb20e1b620de"} pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 05 11:11:25 crc kubenswrapper[4728]: I1205 11:11:25.703125 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" containerID="cri-o://d41cccfccd3b8763566f9a4453832461a74aedcf7bcc18e8c925bb20e1b620de" gracePeriod=600
Dec 05 11:11:26 crc kubenswrapper[4728]: I1205 11:11:26.041818 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-w5msx" podUID="10176a9a-24a6-4a05-a9a7-b91062c87c9b" containerName="registry-server" containerID="cri-o://15f45339d822353e74e01238eaca2cd8957d5b1e3c471f1c6e8f311307249692" gracePeriod=2
Dec 05 11:11:26 crc kubenswrapper[4728]: I1205 11:11:26.080943 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-qrl2q"
Dec 05 11:11:28 crc kubenswrapper[4728]: I1205 11:11:28.740174 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-qrl2q"]
Dec 05 11:11:28 crc kubenswrapper[4728]: I1205 11:11:28.740380 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-qrl2q" podUID="23bda114-880c-46d2-ba79-57a723a6b547" containerName="registry-server" containerID="cri-o://c55d2540c68aeaf2690479a320d33f722587a33450b0dcd0ed1f3bcea22e9b9a" gracePeriod=2
Dec 05 11:11:32 crc kubenswrapper[4728]: I1205 11:11:32.079934 4728 generic.go:334] "Generic (PLEG): container finished" podID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerID="d41cccfccd3b8763566f9a4453832461a74aedcf7bcc18e8c925bb20e1b620de" exitCode=0
Dec 05 11:11:32 crc kubenswrapper[4728]: I1205 11:11:32.080068 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" event={"ID":"95bfa60b-fcb6-4519-abc5-c25fea50921d","Type":"ContainerDied","Data":"d41cccfccd3b8763566f9a4453832461a74aedcf7bcc18e8c925bb20e1b620de"}
Dec 05 11:11:32 crc kubenswrapper[4728]: I1205 11:11:32.082263 4728 generic.go:334] "Generic (PLEG): container finished" podID="10176a9a-24a6-4a05-a9a7-b91062c87c9b" containerID="15f45339d822353e74e01238eaca2cd8957d5b1e3c471f1c6e8f311307249692" exitCode=0
Dec 05 11:11:32 crc kubenswrapper[4728]: I1205 11:11:32.082310 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-w5msx" event={"ID":"10176a9a-24a6-4a05-a9a7-b91062c87c9b","Type":"ContainerDied","Data":"15f45339d822353e74e01238eaca2cd8957d5b1e3c471f1c6e8f311307249692"}
Dec 05 11:11:32 crc kubenswrapper[4728]: I1205 11:11:32.084013 4728 generic.go:334] "Generic (PLEG): container finished" podID="7489957c-ebb7-4902-9617-9a1287ccccb4" containerID="aa4a2c2832a98e895c63611ba804b3727ce7131281c9ed8969b1ffdc6a94ad80" exitCode=0
Dec 05 11:11:32 crc kubenswrapper[4728]: I1205 11:11:32.084025 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6l2f8" event={"ID":"7489957c-ebb7-4902-9617-9a1287ccccb4","Type":"ContainerDied","Data":"aa4a2c2832a98e895c63611ba804b3727ce7131281c9ed8969b1ffdc6a94ad80"}
Dec 05 11:11:32 crc kubenswrapper[4728]: E1205 11:11:32.294379 4728 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of aa4a2c2832a98e895c63611ba804b3727ce7131281c9ed8969b1ffdc6a94ad80 is running failed: container process not found" containerID="aa4a2c2832a98e895c63611ba804b3727ce7131281c9ed8969b1ffdc6a94ad80" cmd=["grpc_health_probe","-addr=:50051"]
Dec 05 11:11:32 crc kubenswrapper[4728]: E1205 11:11:32.294675 4728 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of aa4a2c2832a98e895c63611ba804b3727ce7131281c9ed8969b1ffdc6a94ad80 is running failed: container process not found" containerID="aa4a2c2832a98e895c63611ba804b3727ce7131281c9ed8969b1ffdc6a94ad80" cmd=["grpc_health_probe","-addr=:50051"]
Dec 05 11:11:32 crc kubenswrapper[4728]: E1205 11:11:32.295008 4728 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of aa4a2c2832a98e895c63611ba804b3727ce7131281c9ed8969b1ffdc6a94ad80 is running failed: container process not found" containerID="aa4a2c2832a98e895c63611ba804b3727ce7131281c9ed8969b1ffdc6a94ad80" cmd=["grpc_health_probe","-addr=:50051"]
Dec 05 11:11:32 crc
kubenswrapper[4728]: E1205 11:11:32.295124 4728 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of aa4a2c2832a98e895c63611ba804b3727ce7131281c9ed8969b1ffdc6a94ad80 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/community-operators-6l2f8" podUID="7489957c-ebb7-4902-9617-9a1287ccccb4" containerName="registry-server" Dec 05 11:11:32 crc kubenswrapper[4728]: E1205 11:11:32.594435 4728 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 15f45339d822353e74e01238eaca2cd8957d5b1e3c471f1c6e8f311307249692 is running failed: container process not found" containerID="15f45339d822353e74e01238eaca2cd8957d5b1e3c471f1c6e8f311307249692" cmd=["grpc_health_probe","-addr=:50051"] Dec 05 11:11:32 crc kubenswrapper[4728]: E1205 11:11:32.594956 4728 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 15f45339d822353e74e01238eaca2cd8957d5b1e3c471f1c6e8f311307249692 is running failed: container process not found" containerID="15f45339d822353e74e01238eaca2cd8957d5b1e3c471f1c6e8f311307249692" cmd=["grpc_health_probe","-addr=:50051"] Dec 05 11:11:32 crc kubenswrapper[4728]: E1205 11:11:32.595243 4728 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 15f45339d822353e74e01238eaca2cd8957d5b1e3c471f1c6e8f311307249692 is running failed: container process not found" containerID="15f45339d822353e74e01238eaca2cd8957d5b1e3c471f1c6e8f311307249692" cmd=["grpc_health_probe","-addr=:50051"] Dec 05 11:11:32 crc kubenswrapper[4728]: E1205 11:11:32.595365 4728 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 15f45339d822353e74e01238eaca2cd8957d5b1e3c471f1c6e8f311307249692 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/certified-operators-w5msx" podUID="10176a9a-24a6-4a05-a9a7-b91062c87c9b" containerName="registry-server" Dec 05 11:11:34 crc kubenswrapper[4728]: I1205 11:11:34.097997 4728 generic.go:334] "Generic (PLEG): container finished" podID="23bda114-880c-46d2-ba79-57a723a6b547" containerID="c55d2540c68aeaf2690479a320d33f722587a33450b0dcd0ed1f3bcea22e9b9a" exitCode=0 Dec 05 11:11:34 crc kubenswrapper[4728]: I1205 11:11:34.098046 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qrl2q" event={"ID":"23bda114-880c-46d2-ba79-57a723a6b547","Type":"ContainerDied","Data":"c55d2540c68aeaf2690479a320d33f722587a33450b0dcd0ed1f3bcea22e9b9a"} Dec 05 11:11:35 crc kubenswrapper[4728]: E1205 11:11:35.430559 4728 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of c55d2540c68aeaf2690479a320d33f722587a33450b0dcd0ed1f3bcea22e9b9a is running failed: container process not found" containerID="c55d2540c68aeaf2690479a320d33f722587a33450b0dcd0ed1f3bcea22e9b9a" cmd=["grpc_health_probe","-addr=:50051"] Dec 05 11:11:35 crc kubenswrapper[4728]: E1205 11:11:35.431495 4728 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 
c55d2540c68aeaf2690479a320d33f722587a33450b0dcd0ed1f3bcea22e9b9a is running failed: container process not found" containerID="c55d2540c68aeaf2690479a320d33f722587a33450b0dcd0ed1f3bcea22e9b9a" cmd=["grpc_health_probe","-addr=:50051"] Dec 05 11:11:35 crc kubenswrapper[4728]: E1205 11:11:35.431933 4728 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of c55d2540c68aeaf2690479a320d33f722587a33450b0dcd0ed1f3bcea22e9b9a is running failed: container process not found" containerID="c55d2540c68aeaf2690479a320d33f722587a33450b0dcd0ed1f3bcea22e9b9a" cmd=["grpc_health_probe","-addr=:50051"] Dec 05 11:11:35 crc kubenswrapper[4728]: E1205 11:11:35.431991 4728 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of c55d2540c68aeaf2690479a320d33f722587a33450b0dcd0ed1f3bcea22e9b9a is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-operators-qrl2q" podUID="23bda114-880c-46d2-ba79-57a723a6b547" containerName="registry-server" Dec 05 11:11:38 crc kubenswrapper[4728]: I1205 11:11:38.620255 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" podUID="d92fe6c3-10f5-4151-86cb-236a4c79463b" containerName="oauth-openshift" containerID="cri-o://549d7e8a35ce1e67fd7d353cd12d1d38a2a6396fe71f2ef823d85afbf55ee90f" gracePeriod=15 Dec 05 11:11:40 crc kubenswrapper[4728]: I1205 11:11:40.462388 4728 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 05 11:11:40 crc kubenswrapper[4728]: E1205 11:11:40.463180 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1dc5e673-e410-40d7-b17d-63b09d59d6ee" containerName="pruner" Dec 05 11:11:40 crc kubenswrapper[4728]: I1205 11:11:40.463212 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="1dc5e673-e410-40d7-b17d-63b09d59d6ee" containerName="pruner" Dec 05 11:11:40 crc kubenswrapper[4728]: I1205 11:11:40.463336 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="1dc5e673-e410-40d7-b17d-63b09d59d6ee" containerName="pruner" Dec 05 11:11:40 crc kubenswrapper[4728]: I1205 11:11:40.463648 4728 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 05 11:11:40 crc kubenswrapper[4728]: I1205 11:11:40.463766 4728 util.go:30] "No sandbox for pod can be found. 
Dec 05 11:11:40 crc kubenswrapper[4728]: I1205 11:11:40.463934 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://adb81cae56872a55c5781abe457653330ce2284251d64366ece5f2f05055b509" gracePeriod=15
Dec 05 11:11:40 crc kubenswrapper[4728]: I1205 11:11:40.463995 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://af8363ba45c19a07e7d11064c2e321ec28b939288d01eb82d8bd9e1ee6b93998" gracePeriod=15
Dec 05 11:11:40 crc kubenswrapper[4728]: I1205 11:11:40.464097 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://920da764fdc49312fdf906f31a1280028d5762b4b7af2b3806c9488e3dfa9d71" gracePeriod=15
Dec 05 11:11:40 crc kubenswrapper[4728]: I1205 11:11:40.464149 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://d8ab41915fb3de0296bc2ba49fe39835620c1ed216790add23ccc2c23ac718c2" gracePeriod=15
Dec 05 11:11:40 crc kubenswrapper[4728]: I1205 11:11:40.464115 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://dd145f7ac78fc008f9b9b24a11f51d6afc11e3cfa047b8d178d11507ddde8e77" gracePeriod=15
Dec 05 11:11:40 crc kubenswrapper[4728]: I1205 11:11:40.464838 4728 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"]
Dec 05 11:11:40 crc kubenswrapper[4728]: E1205 11:11:40.465182 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup"
Dec 05 11:11:40 crc kubenswrapper[4728]: I1205 11:11:40.465200 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup"
Dec 05 11:11:40 crc kubenswrapper[4728]: E1205 11:11:40.465210 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz"
Dec 05 11:11:40 crc kubenswrapper[4728]: I1205 11:11:40.465216 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz"
Dec 05 11:11:40 crc kubenswrapper[4728]: E1205 11:11:40.465232 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer"
Dec 05 11:11:40 crc kubenswrapper[4728]: I1205 11:11:40.465239 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer"
Dec 05 11:11:40 crc kubenswrapper[4728]: E1205 11:11:40.465250 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 05 11:11:40 crc kubenswrapper[4728]: I1205 11:11:40.465256 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 05 11:11:40 crc kubenswrapper[4728]: E1205 11:11:40.465264 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller"
Dec 05 11:11:40 crc kubenswrapper[4728]: I1205 11:11:40.465271 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller"
Dec 05 11:11:40 crc kubenswrapper[4728]: E1205 11:11:40.465286 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver"
Dec 05 11:11:40 crc kubenswrapper[4728]: I1205 11:11:40.465292 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver"
Dec 05 11:11:40 crc kubenswrapper[4728]: I1205 11:11:40.465399 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 05 11:11:40 crc kubenswrapper[4728]: I1205 11:11:40.465411 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver"
Dec 05 11:11:40 crc kubenswrapper[4728]: I1205 11:11:40.465420 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller"
Dec 05 11:11:40 crc kubenswrapper[4728]: I1205 11:11:40.465432 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz"
Dec 05 11:11:40 crc kubenswrapper[4728]: I1205 11:11:40.465440 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer"
Dec 05 11:11:40 crc kubenswrapper[4728]: E1205 11:11:40.465566 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 05 11:11:40 crc kubenswrapper[4728]: I1205 11:11:40.465574 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 05 11:11:40 crc kubenswrapper[4728]: I1205 11:11:40.465704 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 05 11:11:40 crc kubenswrapper[4728]: E1205 11:11:40.502387 4728 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.146:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 05 11:11:40 crc kubenswrapper[4728]: I1205 11:11:40.508812 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 05 11:11:40 crc kubenswrapper[4728]: I1205 11:11:40.508857 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 05 11:11:40 crc kubenswrapper[4728]: I1205 11:11:40.508895 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 11:11:40 crc kubenswrapper[4728]: I1205 11:11:40.508917 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 05 11:11:40 crc kubenswrapper[4728]: I1205 11:11:40.508938 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 05 11:11:40 crc kubenswrapper[4728]: I1205 11:11:40.508977 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 11:11:40 crc kubenswrapper[4728]: I1205 11:11:40.508997 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 11:11:40 crc kubenswrapper[4728]: I1205 11:11:40.509196 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 05 11:11:40 crc kubenswrapper[4728]: I1205 11:11:40.610537 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 05 11:11:40 crc kubenswrapper[4728]: I1205 11:11:40.611092 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 05 11:11:40 crc kubenswrapper[4728]: I1205 11:11:40.611244 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 05 11:11:40 crc kubenswrapper[4728]: I1205 11:11:40.611368 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 11:11:40 crc kubenswrapper[4728]: I1205 11:11:40.611458 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 05 11:11:40 crc kubenswrapper[4728]: I1205 11:11:40.611561 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 05 11:11:40 crc kubenswrapper[4728]: I1205 11:11:40.611671 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 11:11:40 crc kubenswrapper[4728]: I1205 11:11:40.611762 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 11:11:40 crc kubenswrapper[4728]: I1205 11:11:40.611862 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 11:11:40 crc kubenswrapper[4728]: I1205 11:11:40.611549 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 05 11:11:40 crc kubenswrapper[4728]: I1205 11:11:40.611321 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 05 11:11:40 crc kubenswrapper[4728]: I1205 11:11:40.611588 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 05 11:11:40 crc kubenswrapper[4728]: I1205 11:11:40.611174 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 05 11:11:40 crc kubenswrapper[4728]: I1205 11:11:40.611717 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 11:11:40 crc kubenswrapper[4728]: I1205 11:11:40.611426 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 11:11:40 crc kubenswrapper[4728]: I1205 11:11:40.610687 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 05 11:11:40 crc kubenswrapper[4728]: I1205 11:11:40.804535 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 05 11:11:41 crc kubenswrapper[4728]: I1205 11:11:41.320647 4728 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:6443/readyz\": dial tcp 192.168.126.11:6443: connect: connection refused" start-of-body=
Dec 05 11:11:41 crc kubenswrapper[4728]: I1205 11:11:41.320966 4728 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="Get \"https://192.168.126.11:6443/readyz\": dial tcp 192.168.126.11:6443: connect: connection refused"
Dec 05 11:11:42 crc kubenswrapper[4728]: E1205 11:11:42.294143 4728 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of aa4a2c2832a98e895c63611ba804b3727ce7131281c9ed8969b1ffdc6a94ad80 is running failed: container process not found" containerID="aa4a2c2832a98e895c63611ba804b3727ce7131281c9ed8969b1ffdc6a94ad80" cmd=["grpc_health_probe","-addr=:50051"]
Dec 05 11:11:42 crc kubenswrapper[4728]: E1205 11:11:42.295049 4728 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of aa4a2c2832a98e895c63611ba804b3727ce7131281c9ed8969b1ffdc6a94ad80 is running failed: container process not found" containerID="aa4a2c2832a98e895c63611ba804b3727ce7131281c9ed8969b1ffdc6a94ad80" cmd=["grpc_health_probe","-addr=:50051"]
Dec 05 11:11:42 crc kubenswrapper[4728]: E1205 11:11:42.295617 4728 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of aa4a2c2832a98e895c63611ba804b3727ce7131281c9ed8969b1ffdc6a94ad80 is running failed: container process not found" containerID="aa4a2c2832a98e895c63611ba804b3727ce7131281c9ed8969b1ffdc6a94ad80" cmd=["grpc_health_probe","-addr=:50051"]
Dec 05 11:11:42 crc kubenswrapper[4728]: E1205 11:11:42.295716 4728 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of aa4a2c2832a98e895c63611ba804b3727ce7131281c9ed8969b1ffdc6a94ad80 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/community-operators-6l2f8" podUID="7489957c-ebb7-4902-9617-9a1287ccccb4" containerName="registry-server"
Dec 05 11:11:42 crc kubenswrapper[4728]: E1205 11:11:42.297149 4728 event.go:368] "Unable to write event (may retry after sleeping)" err="Patch \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/events/community-operators-6l2f8.187e4d4be296c433\": dial tcp 38.102.83.146:6443: connect: connection refused" event="&Event{ObjectMeta:{community-operators-6l2f8.187e4d4be296c433 openshift-marketplace 29332 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-marketplace,Name:community-operators-6l2f8,UID:7489957c-ebb7-4902-9617-9a1287ccccb4,APIVersion:v1,ResourceVersion:28211,FieldPath:spec.containers{registry-server},},Reason:Unhealthy,Message:Readiness probe errored: rpc error: code = NotFound desc = container is not created or running: checking if PID of aa4a2c2832a98e895c63611ba804b3727ce7131281c9ed8969b1ffdc6a94ad80 is running failed: container process not found,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-05 11:11:32 +0000 UTC,LastTimestamp:2025-12-05 11:11:42.295778699 +0000 UTC m=+236.437901422,Count:2,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Dec 05 11:11:42 crc kubenswrapper[4728]: E1205 11:11:42.597335 4728 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 15f45339d822353e74e01238eaca2cd8957d5b1e3c471f1c6e8f311307249692 is running failed: container process not found" containerID="15f45339d822353e74e01238eaca2cd8957d5b1e3c471f1c6e8f311307249692" cmd=["grpc_health_probe","-addr=:50051"]
Dec 05 11:11:42 crc kubenswrapper[4728]: E1205 11:11:42.597764 4728 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 15f45339d822353e74e01238eaca2cd8957d5b1e3c471f1c6e8f311307249692 is running failed: container process not found" containerID="15f45339d822353e74e01238eaca2cd8957d5b1e3c471f1c6e8f311307249692" cmd=["grpc_health_probe","-addr=:50051"]
Dec 05 11:11:42 crc kubenswrapper[4728]: E1205 11:11:42.598111 4728 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 15f45339d822353e74e01238eaca2cd8957d5b1e3c471f1c6e8f311307249692 is running failed: container process not found" containerID="15f45339d822353e74e01238eaca2cd8957d5b1e3c471f1c6e8f311307249692" cmd=["grpc_health_probe","-addr=:50051"]
Dec 05 11:11:42 crc kubenswrapper[4728]: E1205 11:11:42.598160 4728 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 15f45339d822353e74e01238eaca2cd8957d5b1e3c471f1c6e8f311307249692 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/certified-operators-w5msx" podUID="10176a9a-24a6-4a05-a9a7-b91062c87c9b" containerName="registry-server"
Dec 05 11:11:43 crc kubenswrapper[4728]: I1205 11:11:43.173283 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log"
Dec 05 11:11:43 crc kubenswrapper[4728]: I1205 11:11:43.174629 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log"
Dec 05 11:11:43 crc kubenswrapper[4728]: I1205 11:11:43.175509 4728 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="920da764fdc49312fdf906f31a1280028d5762b4b7af2b3806c9488e3dfa9d71" exitCode=2
Dec 05 11:11:44 crc kubenswrapper[4728]: I1205 11:11:44.051824 4728 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-x9m7l container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.9:6443/healthz\": dial tcp 10.217.0.9:6443: connect: connection refused" start-of-body=
Dec 05 11:11:44 crc kubenswrapper[4728]: I1205 11:11:44.051885 4728 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" podUID="d92fe6c3-10f5-4151-86cb-236a4c79463b" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.9:6443/healthz\": dial tcp 10.217.0.9:6443: connect: connection refused"
\"https://10.217.0.9:6443/healthz\": dial tcp 10.217.0.9:6443: connect: connection refused" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.189204 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.190663 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.192194 4728 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="dd145f7ac78fc008f9b9b24a11f51d6afc11e3cfa047b8d178d11507ddde8e77" exitCode=0 Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.192260 4728 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="af8363ba45c19a07e7d11064c2e321ec28b939288d01eb82d8bd9e1ee6b93998" exitCode=0 Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.192289 4728 scope.go:117] "RemoveContainer" containerID="9b05c55e24f53fc91a2fd2b2a34b000751799933c3cb56da1ea666a6bfb6ba13" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.194256 4728 generic.go:334] "Generic (PLEG): container finished" podID="d92fe6c3-10f5-4151-86cb-236a4c79463b" containerID="549d7e8a35ce1e67fd7d353cd12d1d38a2a6396fe71f2ef823d85afbf55ee90f" exitCode=0 Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.194340 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" event={"ID":"d92fe6c3-10f5-4151-86cb-236a4c79463b","Type":"ContainerDied","Data":"549d7e8a35ce1e67fd7d353cd12d1d38a2a6396fe71f2ef823d85afbf55ee90f"} Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.196011 4728 generic.go:334] "Generic (PLEG): container finished" podID="bc456c39-c3f9-4e33-974b-f0627ac7a228" containerID="5ee07bc6c12dbbd6aabe75f5bd4c0b4ee18213d042a88e659005105366e15c59" exitCode=0 Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.196039 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"bc456c39-c3f9-4e33-974b-f0627ac7a228","Type":"ContainerDied","Data":"5ee07bc6c12dbbd6aabe75f5bd4c0b4ee18213d042a88e659005105366e15c59"} Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.196645 4728 status_manager.go:851] "Failed to get status for pod" podUID="bc456c39-c3f9-4e33-974b-f0627ac7a228" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:45 crc kubenswrapper[4728]: E1205 11:11:45.430275 4728 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of c55d2540c68aeaf2690479a320d33f722587a33450b0dcd0ed1f3bcea22e9b9a is running failed: container process not found" containerID="c55d2540c68aeaf2690479a320d33f722587a33450b0dcd0ed1f3bcea22e9b9a" cmd=["grpc_health_probe","-addr=:50051"] Dec 05 11:11:45 crc kubenswrapper[4728]: E1205 11:11:45.430707 4728 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of c55d2540c68aeaf2690479a320d33f722587a33450b0dcd0ed1f3bcea22e9b9a is running failed: 
container process not found" containerID="c55d2540c68aeaf2690479a320d33f722587a33450b0dcd0ed1f3bcea22e9b9a" cmd=["grpc_health_probe","-addr=:50051"] Dec 05 11:11:45 crc kubenswrapper[4728]: E1205 11:11:45.430949 4728 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of c55d2540c68aeaf2690479a320d33f722587a33450b0dcd0ed1f3bcea22e9b9a is running failed: container process not found" containerID="c55d2540c68aeaf2690479a320d33f722587a33450b0dcd0ed1f3bcea22e9b9a" cmd=["grpc_health_probe","-addr=:50051"] Dec 05 11:11:45 crc kubenswrapper[4728]: E1205 11:11:45.430984 4728 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of c55d2540c68aeaf2690479a320d33f722587a33450b0dcd0ed1f3bcea22e9b9a is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-operators-qrl2q" podUID="23bda114-880c-46d2-ba79-57a723a6b547" containerName="registry-server" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.559820 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.562320 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.564040 4728 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.564596 4728 status_manager.go:851] "Failed to get status for pod" podUID="bc456c39-c3f9-4e33-974b-f0627ac7a228" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.678020 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.678407 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.678507 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.678228 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: 
"f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.678929 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.678976 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.720737 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6l2f8" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.721947 4728 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.722350 4728 status_manager.go:851] "Failed to get status for pod" podUID="bc456c39-c3f9-4e33-974b-f0627ac7a228" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.722712 4728 status_manager.go:851] "Failed to get status for pod" podUID="7489957c-ebb7-4902-9617-9a1287ccccb4" pod="openshift-marketplace/community-operators-6l2f8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-6l2f8\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.752171 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qrl2q" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.752623 4728 status_manager.go:851] "Failed to get status for pod" podUID="23bda114-880c-46d2-ba79-57a723a6b547" pod="openshift-marketplace/redhat-operators-qrl2q" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qrl2q\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.753078 4728 status_manager.go:851] "Failed to get status for pod" podUID="7489957c-ebb7-4902-9617-9a1287ccccb4" pod="openshift-marketplace/community-operators-6l2f8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-6l2f8\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.754836 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-w5msx" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.755262 4728 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.755585 4728 status_manager.go:851] "Failed to get status for pod" podUID="bc456c39-c3f9-4e33-974b-f0627ac7a228" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.755972 4728 status_manager.go:851] "Failed to get status for pod" podUID="23bda114-880c-46d2-ba79-57a723a6b547" pod="openshift-marketplace/redhat-operators-qrl2q" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qrl2q\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.756194 4728 status_manager.go:851] "Failed to get status for pod" podUID="7489957c-ebb7-4902-9617-9a1287ccccb4" pod="openshift-marketplace/community-operators-6l2f8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-6l2f8\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.756371 4728 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.756545 4728 status_manager.go:851] "Failed to get status for pod" podUID="bc456c39-c3f9-4e33-974b-f0627ac7a228" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.756709 4728 status_manager.go:851] "Failed to get status for pod" podUID="10176a9a-24a6-4a05-a9a7-b91062c87c9b" pod="openshift-marketplace/certified-operators-w5msx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-w5msx\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.781083 4728 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.781149 4728 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.781163 4728 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node 
\"crc\" DevicePath \"\"" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.807949 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.808652 4728 status_manager.go:851] "Failed to get status for pod" podUID="bc456c39-c3f9-4e33-974b-f0627ac7a228" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.809218 4728 status_manager.go:851] "Failed to get status for pod" podUID="d92fe6c3-10f5-4151-86cb-236a4c79463b" pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-x9m7l\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.809554 4728 status_manager.go:851] "Failed to get status for pod" podUID="10176a9a-24a6-4a05-a9a7-b91062c87c9b" pod="openshift-marketplace/certified-operators-w5msx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-w5msx\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.809864 4728 status_manager.go:851] "Failed to get status for pod" podUID="23bda114-880c-46d2-ba79-57a723a6b547" pod="openshift-marketplace/redhat-operators-qrl2q" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qrl2q\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.810206 4728 status_manager.go:851] "Failed to get status for pod" podUID="7489957c-ebb7-4902-9617-9a1287ccccb4" pod="openshift-marketplace/community-operators-6l2f8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-6l2f8\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.810739 4728 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.882920 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23bda114-880c-46d2-ba79-57a723a6b547-catalog-content\") pod \"23bda114-880c-46d2-ba79-57a723a6b547\" (UID: \"23bda114-880c-46d2-ba79-57a723a6b547\") " Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.883382 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/10176a9a-24a6-4a05-a9a7-b91062c87c9b-utilities\") pod \"10176a9a-24a6-4a05-a9a7-b91062c87c9b\" (UID: \"10176a9a-24a6-4a05-a9a7-b91062c87c9b\") " Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.883459 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/7489957c-ebb7-4902-9617-9a1287ccccb4-utilities\") pod \"7489957c-ebb7-4902-9617-9a1287ccccb4\" (UID: \"7489957c-ebb7-4902-9617-9a1287ccccb4\") " Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.883504 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7489957c-ebb7-4902-9617-9a1287ccccb4-catalog-content\") pod \"7489957c-ebb7-4902-9617-9a1287ccccb4\" (UID: \"7489957c-ebb7-4902-9617-9a1287ccccb4\") " Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.883556 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fjxz9\" (UniqueName: \"kubernetes.io/projected/10176a9a-24a6-4a05-a9a7-b91062c87c9b-kube-api-access-fjxz9\") pod \"10176a9a-24a6-4a05-a9a7-b91062c87c9b\" (UID: \"10176a9a-24a6-4a05-a9a7-b91062c87c9b\") " Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.883590 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9s5cm\" (UniqueName: \"kubernetes.io/projected/7489957c-ebb7-4902-9617-9a1287ccccb4-kube-api-access-9s5cm\") pod \"7489957c-ebb7-4902-9617-9a1287ccccb4\" (UID: \"7489957c-ebb7-4902-9617-9a1287ccccb4\") " Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.883617 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/10176a9a-24a6-4a05-a9a7-b91062c87c9b-catalog-content\") pod \"10176a9a-24a6-4a05-a9a7-b91062c87c9b\" (UID: \"10176a9a-24a6-4a05-a9a7-b91062c87c9b\") " Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.883646 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c45tr\" (UniqueName: \"kubernetes.io/projected/23bda114-880c-46d2-ba79-57a723a6b547-kube-api-access-c45tr\") pod \"23bda114-880c-46d2-ba79-57a723a6b547\" (UID: \"23bda114-880c-46d2-ba79-57a723a6b547\") " Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.883674 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23bda114-880c-46d2-ba79-57a723a6b547-utilities\") pod \"23bda114-880c-46d2-ba79-57a723a6b547\" (UID: \"23bda114-880c-46d2-ba79-57a723a6b547\") " Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.884412 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/10176a9a-24a6-4a05-a9a7-b91062c87c9b-utilities" (OuterVolumeSpecName: "utilities") pod "10176a9a-24a6-4a05-a9a7-b91062c87c9b" (UID: "10176a9a-24a6-4a05-a9a7-b91062c87c9b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.884705 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7489957c-ebb7-4902-9617-9a1287ccccb4-utilities" (OuterVolumeSpecName: "utilities") pod "7489957c-ebb7-4902-9617-9a1287ccccb4" (UID: "7489957c-ebb7-4902-9617-9a1287ccccb4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.884879 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/23bda114-880c-46d2-ba79-57a723a6b547-utilities" (OuterVolumeSpecName: "utilities") pod "23bda114-880c-46d2-ba79-57a723a6b547" (UID: "23bda114-880c-46d2-ba79-57a723a6b547"). 
InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.890904 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7489957c-ebb7-4902-9617-9a1287ccccb4-kube-api-access-9s5cm" (OuterVolumeSpecName: "kube-api-access-9s5cm") pod "7489957c-ebb7-4902-9617-9a1287ccccb4" (UID: "7489957c-ebb7-4902-9617-9a1287ccccb4"). InnerVolumeSpecName "kube-api-access-9s5cm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.891179 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/10176a9a-24a6-4a05-a9a7-b91062c87c9b-kube-api-access-fjxz9" (OuterVolumeSpecName: "kube-api-access-fjxz9") pod "10176a9a-24a6-4a05-a9a7-b91062c87c9b" (UID: "10176a9a-24a6-4a05-a9a7-b91062c87c9b"). InnerVolumeSpecName "kube-api-access-fjxz9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.892334 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23bda114-880c-46d2-ba79-57a723a6b547-kube-api-access-c45tr" (OuterVolumeSpecName: "kube-api-access-c45tr") pod "23bda114-880c-46d2-ba79-57a723a6b547" (UID: "23bda114-880c-46d2-ba79-57a723a6b547"). InnerVolumeSpecName "kube-api-access-c45tr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.950061 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/10176a9a-24a6-4a05-a9a7-b91062c87c9b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "10176a9a-24a6-4a05-a9a7-b91062c87c9b" (UID: "10176a9a-24a6-4a05-a9a7-b91062c87c9b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.967279 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7489957c-ebb7-4902-9617-9a1287ccccb4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7489957c-ebb7-4902-9617-9a1287ccccb4" (UID: "7489957c-ebb7-4902-9617-9a1287ccccb4"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.985426 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-user-template-login\") pod \"d92fe6c3-10f5-4151-86cb-236a4c79463b\" (UID: \"d92fe6c3-10f5-4151-86cb-236a4c79463b\") " Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.985488 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-user-template-provider-selection\") pod \"d92fe6c3-10f5-4151-86cb-236a4c79463b\" (UID: \"d92fe6c3-10f5-4151-86cb-236a4c79463b\") " Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.985520 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tlnhg\" (UniqueName: \"kubernetes.io/projected/d92fe6c3-10f5-4151-86cb-236a4c79463b-kube-api-access-tlnhg\") pod \"d92fe6c3-10f5-4151-86cb-236a4c79463b\" (UID: \"d92fe6c3-10f5-4151-86cb-236a4c79463b\") " Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.985555 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-user-idp-0-file-data\") pod \"d92fe6c3-10f5-4151-86cb-236a4c79463b\" (UID: \"d92fe6c3-10f5-4151-86cb-236a4c79463b\") " Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.985583 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-system-router-certs\") pod \"d92fe6c3-10f5-4151-86cb-236a4c79463b\" (UID: \"d92fe6c3-10f5-4151-86cb-236a4c79463b\") " Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.985622 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-system-trusted-ca-bundle\") pod \"d92fe6c3-10f5-4151-86cb-236a4c79463b\" (UID: \"d92fe6c3-10f5-4151-86cb-236a4c79463b\") " Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.985646 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-system-cliconfig\") pod \"d92fe6c3-10f5-4151-86cb-236a4c79463b\" (UID: \"d92fe6c3-10f5-4151-86cb-236a4c79463b\") " Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.985683 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/d92fe6c3-10f5-4151-86cb-236a4c79463b-audit-policies\") pod \"d92fe6c3-10f5-4151-86cb-236a4c79463b\" (UID: \"d92fe6c3-10f5-4151-86cb-236a4c79463b\") " Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.985710 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-user-template-error\") pod \"d92fe6c3-10f5-4151-86cb-236a4c79463b\" (UID: \"d92fe6c3-10f5-4151-86cb-236a4c79463b\") " Dec 05 11:11:45 crc kubenswrapper[4728]: 
I1205 11:11:45.985736 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-system-service-ca\") pod \"d92fe6c3-10f5-4151-86cb-236a4c79463b\" (UID: \"d92fe6c3-10f5-4151-86cb-236a4c79463b\") " Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.985762 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-system-session\") pod \"d92fe6c3-10f5-4151-86cb-236a4c79463b\" (UID: \"d92fe6c3-10f5-4151-86cb-236a4c79463b\") " Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.985816 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-system-ocp-branding-template\") pod \"d92fe6c3-10f5-4151-86cb-236a4c79463b\" (UID: \"d92fe6c3-10f5-4151-86cb-236a4c79463b\") " Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.985854 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d92fe6c3-10f5-4151-86cb-236a4c79463b-audit-dir\") pod \"d92fe6c3-10f5-4151-86cb-236a4c79463b\" (UID: \"d92fe6c3-10f5-4151-86cb-236a4c79463b\") " Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.985882 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-system-serving-cert\") pod \"d92fe6c3-10f5-4151-86cb-236a4c79463b\" (UID: \"d92fe6c3-10f5-4151-86cb-236a4c79463b\") " Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.986053 4728 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7489957c-ebb7-4902-9617-9a1287ccccb4-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.986070 4728 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7489957c-ebb7-4902-9617-9a1287ccccb4-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.986082 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fjxz9\" (UniqueName: \"kubernetes.io/projected/10176a9a-24a6-4a05-a9a7-b91062c87c9b-kube-api-access-fjxz9\") on node \"crc\" DevicePath \"\"" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.986094 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9s5cm\" (UniqueName: \"kubernetes.io/projected/7489957c-ebb7-4902-9617-9a1287ccccb4-kube-api-access-9s5cm\") on node \"crc\" DevicePath \"\"" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.986106 4728 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/10176a9a-24a6-4a05-a9a7-b91062c87c9b-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.986116 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c45tr\" (UniqueName: \"kubernetes.io/projected/23bda114-880c-46d2-ba79-57a723a6b547-kube-api-access-c45tr\") on node \"crc\" DevicePath \"\"" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 
11:11:45.986127 4728 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23bda114-880c-46d2-ba79-57a723a6b547-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.986140 4728 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/10176a9a-24a6-4a05-a9a7-b91062c87c9b-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.986988 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "d92fe6c3-10f5-4151-86cb-236a4c79463b" (UID: "d92fe6c3-10f5-4151-86cb-236a4c79463b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.988328 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "d92fe6c3-10f5-4151-86cb-236a4c79463b" (UID: "d92fe6c3-10f5-4151-86cb-236a4c79463b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.988899 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "d92fe6c3-10f5-4151-86cb-236a4c79463b" (UID: "d92fe6c3-10f5-4151-86cb-236a4c79463b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.989366 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d92fe6c3-10f5-4151-86cb-236a4c79463b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "d92fe6c3-10f5-4151-86cb-236a4c79463b" (UID: "d92fe6c3-10f5-4151-86cb-236a4c79463b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.990029 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "d92fe6c3-10f5-4151-86cb-236a4c79463b" (UID: "d92fe6c3-10f5-4151-86cb-236a4c79463b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.990543 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d92fe6c3-10f5-4151-86cb-236a4c79463b-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "d92fe6c3-10f5-4151-86cb-236a4c79463b" (UID: "d92fe6c3-10f5-4151-86cb-236a4c79463b"). InnerVolumeSpecName "audit-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.990453 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "d92fe6c3-10f5-4151-86cb-236a4c79463b" (UID: "d92fe6c3-10f5-4151-86cb-236a4c79463b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.991298 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d92fe6c3-10f5-4151-86cb-236a4c79463b-kube-api-access-tlnhg" (OuterVolumeSpecName: "kube-api-access-tlnhg") pod "d92fe6c3-10f5-4151-86cb-236a4c79463b" (UID: "d92fe6c3-10f5-4151-86cb-236a4c79463b"). InnerVolumeSpecName "kube-api-access-tlnhg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.991766 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "d92fe6c3-10f5-4151-86cb-236a4c79463b" (UID: "d92fe6c3-10f5-4151-86cb-236a4c79463b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.993165 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "d92fe6c3-10f5-4151-86cb-236a4c79463b" (UID: "d92fe6c3-10f5-4151-86cb-236a4c79463b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.993766 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "d92fe6c3-10f5-4151-86cb-236a4c79463b" (UID: "d92fe6c3-10f5-4151-86cb-236a4c79463b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.994074 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "d92fe6c3-10f5-4151-86cb-236a4c79463b" (UID: "d92fe6c3-10f5-4151-86cb-236a4c79463b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.994578 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "d92fe6c3-10f5-4151-86cb-236a4c79463b" (UID: "d92fe6c3-10f5-4151-86cb-236a4c79463b"). InnerVolumeSpecName "v4-0-config-system-session". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:11:45 crc kubenswrapper[4728]: I1205 11:11:45.997074 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "d92fe6c3-10f5-4151-86cb-236a4c79463b" (UID: "d92fe6c3-10f5-4151-86cb-236a4c79463b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.037711 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/23bda114-880c-46d2-ba79-57a723a6b547-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "23bda114-880c-46d2-ba79-57a723a6b547" (UID: "23bda114-880c-46d2-ba79-57a723a6b547"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.087205 4728 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d92fe6c3-10f5-4151-86cb-236a4c79463b-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.087249 4728 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.087265 4728 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.087281 4728 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.087297 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tlnhg\" (UniqueName: \"kubernetes.io/projected/d92fe6c3-10f5-4151-86cb-236a4c79463b-kube-api-access-tlnhg\") on node \"crc\" DevicePath \"\"" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.087309 4728 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.087323 4728 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.087336 4728 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23bda114-880c-46d2-ba79-57a723a6b547-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.087348 4728 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.087363 4728 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.087375 4728 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/d92fe6c3-10f5-4151-86cb-236a4c79463b-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.087386 4728 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.087398 4728 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.087409 4728 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.087420 4728 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/d92fe6c3-10f5-4151-86cb-236a4c79463b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.202007 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"6d3ab30f24a2f88b457ace856fcd832eb7cecd446b8f96c8a4c104dcc46b2b8a"} Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.204389 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-w5msx" event={"ID":"10176a9a-24a6-4a05-a9a7-b91062c87c9b","Type":"ContainerDied","Data":"a79346779735f5f7500941fa415c5d0d811fd19a24336df3982a9c4bf79b263e"} Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.204455 4728 scope.go:117] "RemoveContainer" containerID="15f45339d822353e74e01238eaca2cd8957d5b1e3c471f1c6e8f311307249692" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.204612 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-w5msx" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.205904 4728 status_manager.go:851] "Failed to get status for pod" podUID="23bda114-880c-46d2-ba79-57a723a6b547" pod="openshift-marketplace/redhat-operators-qrl2q" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qrl2q\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.206211 4728 status_manager.go:851] "Failed to get status for pod" podUID="7489957c-ebb7-4902-9617-9a1287ccccb4" pod="openshift-marketplace/community-operators-6l2f8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-6l2f8\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.206815 4728 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.207187 4728 status_manager.go:851] "Failed to get status for pod" podUID="bc456c39-c3f9-4e33-974b-f0627ac7a228" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.207522 4728 status_manager.go:851] "Failed to get status for pod" podUID="d92fe6c3-10f5-4151-86cb-236a4c79463b" pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-x9m7l\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.207911 4728 status_manager.go:851] "Failed to get status for pod" podUID="10176a9a-24a6-4a05-a9a7-b91062c87c9b" pod="openshift-marketplace/certified-operators-w5msx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-w5msx\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.208883 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6l2f8" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.208862 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6l2f8" event={"ID":"7489957c-ebb7-4902-9617-9a1287ccccb4","Type":"ContainerDied","Data":"1184c4a12a475201716d900423b728132d2d529fa24876427011b4f23bb759c5"} Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.209475 4728 status_manager.go:851] "Failed to get status for pod" podUID="23bda114-880c-46d2-ba79-57a723a6b547" pod="openshift-marketplace/redhat-operators-qrl2q" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qrl2q\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.209657 4728 status_manager.go:851] "Failed to get status for pod" podUID="7489957c-ebb7-4902-9617-9a1287ccccb4" pod="openshift-marketplace/community-operators-6l2f8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-6l2f8\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.209880 4728 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.210219 4728 status_manager.go:851] "Failed to get status for pod" podUID="bc456c39-c3f9-4e33-974b-f0627ac7a228" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.210580 4728 status_manager.go:851] "Failed to get status for pod" podUID="d92fe6c3-10f5-4151-86cb-236a4c79463b" pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-x9m7l\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.210916 4728 status_manager.go:851] "Failed to get status for pod" podUID="10176a9a-24a6-4a05-a9a7-b91062c87c9b" pod="openshift-marketplace/certified-operators-w5msx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-w5msx\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.212685 4728 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="d8ab41915fb3de0296bc2ba49fe39835620c1ed216790add23ccc2c23ac718c2" exitCode=0 Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.212714 4728 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="adb81cae56872a55c5781abe457653330ce2284251d64366ece5f2f05055b509" exitCode=0 Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.212808 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.221238 4728 status_manager.go:851] "Failed to get status for pod" podUID="bc456c39-c3f9-4e33-974b-f0627ac7a228" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.222135 4728 status_manager.go:851] "Failed to get status for pod" podUID="d92fe6c3-10f5-4151-86cb-236a4c79463b" pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-x9m7l\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.222619 4728 status_manager.go:851] "Failed to get status for pod" podUID="10176a9a-24a6-4a05-a9a7-b91062c87c9b" pod="openshift-marketplace/certified-operators-w5msx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-w5msx\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.222973 4728 status_manager.go:851] "Failed to get status for pod" podUID="23bda114-880c-46d2-ba79-57a723a6b547" pod="openshift-marketplace/redhat-operators-qrl2q" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qrl2q\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.223216 4728 status_manager.go:851] "Failed to get status for pod" podUID="7489957c-ebb7-4902-9617-9a1287ccccb4" pod="openshift-marketplace/community-operators-6l2f8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-6l2f8\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.223471 4728 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.224019 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mjf89" event={"ID":"09c93128-1454-446f-bb75-771442084d74","Type":"ContainerStarted","Data":"b1832c94119ca4f8b4ebd72b2c33c0ba7f3dd2ae97f72fc912bc7a363c7dac8f"} Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.224712 4728 scope.go:117] "RemoveContainer" containerID="b742ff329fa3eca81567bf6a5c6837fdbf6070f60afee1e6f5dff53f9ba922c2" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.227197 4728 status_manager.go:851] "Failed to get status for pod" podUID="bc456c39-c3f9-4e33-974b-f0627ac7a228" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.227424 4728 status_manager.go:851] "Failed to get status for pod" podUID="d92fe6c3-10f5-4151-86cb-236a4c79463b" 
pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-x9m7l\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.227614 4728 status_manager.go:851] "Failed to get status for pod" podUID="10176a9a-24a6-4a05-a9a7-b91062c87c9b" pod="openshift-marketplace/certified-operators-w5msx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-w5msx\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.227881 4728 status_manager.go:851] "Failed to get status for pod" podUID="23bda114-880c-46d2-ba79-57a723a6b547" pod="openshift-marketplace/redhat-operators-qrl2q" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qrl2q\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.228100 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qrl2q" event={"ID":"23bda114-880c-46d2-ba79-57a723a6b547","Type":"ContainerDied","Data":"4d41df0564d22de12b4695b1d1339fbf1e63ef4d9fe54c515e26afbab888ba20"} Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.228136 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qrl2q" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.228673 4728 status_manager.go:851] "Failed to get status for pod" podUID="7489957c-ebb7-4902-9617-9a1287ccccb4" pod="openshift-marketplace/community-operators-6l2f8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-6l2f8\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.229009 4728 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.229297 4728 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.229591 4728 status_manager.go:851] "Failed to get status for pod" podUID="bc456c39-c3f9-4e33-974b-f0627ac7a228" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.229952 4728 status_manager.go:851] "Failed to get status for pod" podUID="d92fe6c3-10f5-4151-86cb-236a4c79463b" pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-x9m7l\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc 
kubenswrapper[4728]: I1205 11:11:46.230217 4728 status_manager.go:851] "Failed to get status for pod" podUID="10176a9a-24a6-4a05-a9a7-b91062c87c9b" pod="openshift-marketplace/certified-operators-w5msx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-w5msx\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.230469 4728 status_manager.go:851] "Failed to get status for pod" podUID="23bda114-880c-46d2-ba79-57a723a6b547" pod="openshift-marketplace/redhat-operators-qrl2q" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qrl2q\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.230735 4728 status_manager.go:851] "Failed to get status for pod" podUID="7489957c-ebb7-4902-9617-9a1287ccccb4" pod="openshift-marketplace/community-operators-6l2f8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-6l2f8\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.231078 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5wcjm" event={"ID":"09543444-057d-42b7-a103-6af978f7c627","Type":"ContainerStarted","Data":"5e5f6be9a5fe43f0818733b84d42c0d13f975093d961dcfc5dd449015e11a0ea"} Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.234778 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tfbzt" event={"ID":"29c0e6c0-78ea-4a37-aa57-22af46f50133","Type":"ContainerStarted","Data":"e77b1c412600e174eb403d781f0c0273cbbc7b072f51d8f573ced7354d5b724f"} Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.237506 4728 status_manager.go:851] "Failed to get status for pod" podUID="bc456c39-c3f9-4e33-974b-f0627ac7a228" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.237950 4728 status_manager.go:851] "Failed to get status for pod" podUID="d92fe6c3-10f5-4151-86cb-236a4c79463b" pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-x9m7l\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.237968 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" event={"ID":"d92fe6c3-10f5-4151-86cb-236a4c79463b","Type":"ContainerDied","Data":"c26b67a977c20de5a4193e632d3e419928cac1abce3a721d076ec77c80de27a0"} Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.238020 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.238683 4728 status_manager.go:851] "Failed to get status for pod" podUID="10176a9a-24a6-4a05-a9a7-b91062c87c9b" pod="openshift-marketplace/certified-operators-w5msx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-w5msx\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.239088 4728 status_manager.go:851] "Failed to get status for pod" podUID="23bda114-880c-46d2-ba79-57a723a6b547" pod="openshift-marketplace/redhat-operators-qrl2q" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qrl2q\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.239294 4728 status_manager.go:851] "Failed to get status for pod" podUID="7489957c-ebb7-4902-9617-9a1287ccccb4" pod="openshift-marketplace/community-operators-6l2f8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-6l2f8\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.239520 4728 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.240226 4728 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.240670 4728 status_manager.go:851] "Failed to get status for pod" podUID="bc456c39-c3f9-4e33-974b-f0627ac7a228" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.241392 4728 status_manager.go:851] "Failed to get status for pod" podUID="d92fe6c3-10f5-4151-86cb-236a4c79463b" pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-x9m7l\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.241997 4728 status_manager.go:851] "Failed to get status for pod" podUID="10176a9a-24a6-4a05-a9a7-b91062c87c9b" pod="openshift-marketplace/certified-operators-w5msx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-w5msx\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.242091 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" 
event={"ID":"95bfa60b-fcb6-4519-abc5-c25fea50921d","Type":"ContainerStarted","Data":"e95ef4252beb34711bfa41f21abadded9db33cd1c9b400ad26f4ed3d1cabb0bf"} Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.242284 4728 status_manager.go:851] "Failed to get status for pod" podUID="23bda114-880c-46d2-ba79-57a723a6b547" pod="openshift-marketplace/redhat-operators-qrl2q" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qrl2q\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.242556 4728 status_manager.go:851] "Failed to get status for pod" podUID="7489957c-ebb7-4902-9617-9a1287ccccb4" pod="openshift-marketplace/community-operators-6l2f8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-6l2f8\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.247058 4728 status_manager.go:851] "Failed to get status for pod" podUID="bc456c39-c3f9-4e33-974b-f0627ac7a228" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.247536 4728 status_manager.go:851] "Failed to get status for pod" podUID="d92fe6c3-10f5-4151-86cb-236a4c79463b" pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-x9m7l\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.248026 4728 status_manager.go:851] "Failed to get status for pod" podUID="10176a9a-24a6-4a05-a9a7-b91062c87c9b" pod="openshift-marketplace/certified-operators-w5msx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-w5msx\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.249368 4728 status_manager.go:851] "Failed to get status for pod" podUID="23bda114-880c-46d2-ba79-57a723a6b547" pod="openshift-marketplace/redhat-operators-qrl2q" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qrl2q\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.249872 4728 status_manager.go:851] "Failed to get status for pod" podUID="7489957c-ebb7-4902-9617-9a1287ccccb4" pod="openshift-marketplace/community-operators-6l2f8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-6l2f8\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.250236 4728 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.268077 4728 scope.go:117] "RemoveContainer" containerID="df8211e03f0f3897ce247afeac0664f4f05b741dd33941d6ae4a3559fc75b7d4" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 
11:11:46.347502 4728 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.347916 4728 status_manager.go:851] "Failed to get status for pod" podUID="bc456c39-c3f9-4e33-974b-f0627ac7a228" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.348498 4728 status_manager.go:851] "Failed to get status for pod" podUID="d92fe6c3-10f5-4151-86cb-236a4c79463b" pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-x9m7l\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.348710 4728 status_manager.go:851] "Failed to get status for pod" podUID="10176a9a-24a6-4a05-a9a7-b91062c87c9b" pod="openshift-marketplace/certified-operators-w5msx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-w5msx\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.348924 4728 status_manager.go:851] "Failed to get status for pod" podUID="23bda114-880c-46d2-ba79-57a723a6b547" pod="openshift-marketplace/redhat-operators-qrl2q" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qrl2q\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.349142 4728 status_manager.go:851] "Failed to get status for pod" podUID="7489957c-ebb7-4902-9617-9a1287ccccb4" pod="openshift-marketplace/community-operators-6l2f8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-6l2f8\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.349363 4728 status_manager.go:851] "Failed to get status for pod" podUID="23bda114-880c-46d2-ba79-57a723a6b547" pod="openshift-marketplace/redhat-operators-qrl2q" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qrl2q\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.349544 4728 status_manager.go:851] "Failed to get status for pod" podUID="7489957c-ebb7-4902-9617-9a1287ccccb4" pod="openshift-marketplace/community-operators-6l2f8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-6l2f8\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.349725 4728 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 
11:11:46.349944 4728 status_manager.go:851] "Failed to get status for pod" podUID="bc456c39-c3f9-4e33-974b-f0627ac7a228" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.350121 4728 status_manager.go:851] "Failed to get status for pod" podUID="d92fe6c3-10f5-4151-86cb-236a4c79463b" pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-x9m7l\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.350299 4728 status_manager.go:851] "Failed to get status for pod" podUID="10176a9a-24a6-4a05-a9a7-b91062c87c9b" pod="openshift-marketplace/certified-operators-w5msx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-w5msx\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.354958 4728 status_manager.go:851] "Failed to get status for pod" podUID="d92fe6c3-10f5-4151-86cb-236a4c79463b" pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-x9m7l\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.355662 4728 status_manager.go:851] "Failed to get status for pod" podUID="10176a9a-24a6-4a05-a9a7-b91062c87c9b" pod="openshift-marketplace/certified-operators-w5msx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-w5msx\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.355841 4728 status_manager.go:851] "Failed to get status for pod" podUID="23bda114-880c-46d2-ba79-57a723a6b547" pod="openshift-marketplace/redhat-operators-qrl2q" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qrl2q\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.356318 4728 status_manager.go:851] "Failed to get status for pod" podUID="7489957c-ebb7-4902-9617-9a1287ccccb4" pod="openshift-marketplace/community-operators-6l2f8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-6l2f8\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.356353 4728 scope.go:117] "RemoveContainer" containerID="aa4a2c2832a98e895c63611ba804b3727ce7131281c9ed8969b1ffdc6a94ad80" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.356707 4728 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.359702 4728 status_manager.go:851] "Failed to get status for pod" podUID="bc456c39-c3f9-4e33-974b-f0627ac7a228" pod="openshift-kube-apiserver/installer-9-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.371137 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.423843 4728 scope.go:117] "RemoveContainer" containerID="3f81b1f9ef481c4e2688116472c1f643245139322e6c8bc3c19f0860f6c052ca" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.482429 4728 scope.go:117] "RemoveContainer" containerID="3a56baf506365daa34cc2c52670bf1078508a32600c41ecc0b9139487e28471a" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.490316 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.490751 4728 status_manager.go:851] "Failed to get status for pod" podUID="bc456c39-c3f9-4e33-974b-f0627ac7a228" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.490962 4728 status_manager.go:851] "Failed to get status for pod" podUID="d92fe6c3-10f5-4151-86cb-236a4c79463b" pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-x9m7l\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.491111 4728 status_manager.go:851] "Failed to get status for pod" podUID="10176a9a-24a6-4a05-a9a7-b91062c87c9b" pod="openshift-marketplace/certified-operators-w5msx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-w5msx\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.491259 4728 status_manager.go:851] "Failed to get status for pod" podUID="23bda114-880c-46d2-ba79-57a723a6b547" pod="openshift-marketplace/redhat-operators-qrl2q" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qrl2q\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.491403 4728 status_manager.go:851] "Failed to get status for pod" podUID="7489957c-ebb7-4902-9617-9a1287ccccb4" pod="openshift-marketplace/community-operators-6l2f8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-6l2f8\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.498675 4728 scope.go:117] "RemoveContainer" containerID="dd145f7ac78fc008f9b9b24a11f51d6afc11e3cfa047b8d178d11507ddde8e77" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.520382 4728 scope.go:117] "RemoveContainer" containerID="af8363ba45c19a07e7d11064c2e321ec28b939288d01eb82d8bd9e1ee6b93998" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.536717 4728 scope.go:117] "RemoveContainer" containerID="d8ab41915fb3de0296bc2ba49fe39835620c1ed216790add23ccc2c23ac718c2" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.551738 4728 
scope.go:117] "RemoveContainer" containerID="920da764fdc49312fdf906f31a1280028d5762b4b7af2b3806c9488e3dfa9d71" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.572697 4728 scope.go:117] "RemoveContainer" containerID="adb81cae56872a55c5781abe457653330ce2284251d64366ece5f2f05055b509" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.590504 4728 scope.go:117] "RemoveContainer" containerID="51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.597341 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/bc456c39-c3f9-4e33-974b-f0627ac7a228-kube-api-access\") pod \"bc456c39-c3f9-4e33-974b-f0627ac7a228\" (UID: \"bc456c39-c3f9-4e33-974b-f0627ac7a228\") " Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.597896 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/bc456c39-c3f9-4e33-974b-f0627ac7a228-var-lock\") pod \"bc456c39-c3f9-4e33-974b-f0627ac7a228\" (UID: \"bc456c39-c3f9-4e33-974b-f0627ac7a228\") " Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.597943 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/bc456c39-c3f9-4e33-974b-f0627ac7a228-kubelet-dir\") pod \"bc456c39-c3f9-4e33-974b-f0627ac7a228\" (UID: \"bc456c39-c3f9-4e33-974b-f0627ac7a228\") " Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.598120 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bc456c39-c3f9-4e33-974b-f0627ac7a228-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "bc456c39-c3f9-4e33-974b-f0627ac7a228" (UID: "bc456c39-c3f9-4e33-974b-f0627ac7a228"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.598438 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bc456c39-c3f9-4e33-974b-f0627ac7a228-var-lock" (OuterVolumeSpecName: "var-lock") pod "bc456c39-c3f9-4e33-974b-f0627ac7a228" (UID: "bc456c39-c3f9-4e33-974b-f0627ac7a228"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.607168 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc456c39-c3f9-4e33-974b-f0627ac7a228-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "bc456c39-c3f9-4e33-974b-f0627ac7a228" (UID: "bc456c39-c3f9-4e33-974b-f0627ac7a228"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.616177 4728 scope.go:117] "RemoveContainer" containerID="dd145f7ac78fc008f9b9b24a11f51d6afc11e3cfa047b8d178d11507ddde8e77" Dec 05 11:11:46 crc kubenswrapper[4728]: E1205 11:11:46.616463 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dd145f7ac78fc008f9b9b24a11f51d6afc11e3cfa047b8d178d11507ddde8e77\": container with ID starting with dd145f7ac78fc008f9b9b24a11f51d6afc11e3cfa047b8d178d11507ddde8e77 not found: ID does not exist" containerID="dd145f7ac78fc008f9b9b24a11f51d6afc11e3cfa047b8d178d11507ddde8e77" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.616500 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd145f7ac78fc008f9b9b24a11f51d6afc11e3cfa047b8d178d11507ddde8e77"} err="failed to get container status \"dd145f7ac78fc008f9b9b24a11f51d6afc11e3cfa047b8d178d11507ddde8e77\": rpc error: code = NotFound desc = could not find container \"dd145f7ac78fc008f9b9b24a11f51d6afc11e3cfa047b8d178d11507ddde8e77\": container with ID starting with dd145f7ac78fc008f9b9b24a11f51d6afc11e3cfa047b8d178d11507ddde8e77 not found: ID does not exist" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.616531 4728 scope.go:117] "RemoveContainer" containerID="af8363ba45c19a07e7d11064c2e321ec28b939288d01eb82d8bd9e1ee6b93998" Dec 05 11:11:46 crc kubenswrapper[4728]: E1205 11:11:46.616775 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"af8363ba45c19a07e7d11064c2e321ec28b939288d01eb82d8bd9e1ee6b93998\": container with ID starting with af8363ba45c19a07e7d11064c2e321ec28b939288d01eb82d8bd9e1ee6b93998 not found: ID does not exist" containerID="af8363ba45c19a07e7d11064c2e321ec28b939288d01eb82d8bd9e1ee6b93998" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.616820 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"af8363ba45c19a07e7d11064c2e321ec28b939288d01eb82d8bd9e1ee6b93998"} err="failed to get container status \"af8363ba45c19a07e7d11064c2e321ec28b939288d01eb82d8bd9e1ee6b93998\": rpc error: code = NotFound desc = could not find container \"af8363ba45c19a07e7d11064c2e321ec28b939288d01eb82d8bd9e1ee6b93998\": container with ID starting with af8363ba45c19a07e7d11064c2e321ec28b939288d01eb82d8bd9e1ee6b93998 not found: ID does not exist" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.616841 4728 scope.go:117] "RemoveContainer" containerID="d8ab41915fb3de0296bc2ba49fe39835620c1ed216790add23ccc2c23ac718c2" Dec 05 11:11:46 crc kubenswrapper[4728]: E1205 11:11:46.617079 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d8ab41915fb3de0296bc2ba49fe39835620c1ed216790add23ccc2c23ac718c2\": container with ID starting with d8ab41915fb3de0296bc2ba49fe39835620c1ed216790add23ccc2c23ac718c2 not found: ID does not exist" containerID="d8ab41915fb3de0296bc2ba49fe39835620c1ed216790add23ccc2c23ac718c2" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.617105 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d8ab41915fb3de0296bc2ba49fe39835620c1ed216790add23ccc2c23ac718c2"} err="failed to get container status \"d8ab41915fb3de0296bc2ba49fe39835620c1ed216790add23ccc2c23ac718c2\": rpc error: code = NotFound desc = could not 
find container \"d8ab41915fb3de0296bc2ba49fe39835620c1ed216790add23ccc2c23ac718c2\": container with ID starting with d8ab41915fb3de0296bc2ba49fe39835620c1ed216790add23ccc2c23ac718c2 not found: ID does not exist" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.617121 4728 scope.go:117] "RemoveContainer" containerID="920da764fdc49312fdf906f31a1280028d5762b4b7af2b3806c9488e3dfa9d71" Dec 05 11:11:46 crc kubenswrapper[4728]: E1205 11:11:46.617280 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"920da764fdc49312fdf906f31a1280028d5762b4b7af2b3806c9488e3dfa9d71\": container with ID starting with 920da764fdc49312fdf906f31a1280028d5762b4b7af2b3806c9488e3dfa9d71 not found: ID does not exist" containerID="920da764fdc49312fdf906f31a1280028d5762b4b7af2b3806c9488e3dfa9d71" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.617306 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"920da764fdc49312fdf906f31a1280028d5762b4b7af2b3806c9488e3dfa9d71"} err="failed to get container status \"920da764fdc49312fdf906f31a1280028d5762b4b7af2b3806c9488e3dfa9d71\": rpc error: code = NotFound desc = could not find container \"920da764fdc49312fdf906f31a1280028d5762b4b7af2b3806c9488e3dfa9d71\": container with ID starting with 920da764fdc49312fdf906f31a1280028d5762b4b7af2b3806c9488e3dfa9d71 not found: ID does not exist" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.617322 4728 scope.go:117] "RemoveContainer" containerID="adb81cae56872a55c5781abe457653330ce2284251d64366ece5f2f05055b509" Dec 05 11:11:46 crc kubenswrapper[4728]: E1205 11:11:46.617484 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"adb81cae56872a55c5781abe457653330ce2284251d64366ece5f2f05055b509\": container with ID starting with adb81cae56872a55c5781abe457653330ce2284251d64366ece5f2f05055b509 not found: ID does not exist" containerID="adb81cae56872a55c5781abe457653330ce2284251d64366ece5f2f05055b509" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.617509 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"adb81cae56872a55c5781abe457653330ce2284251d64366ece5f2f05055b509"} err="failed to get container status \"adb81cae56872a55c5781abe457653330ce2284251d64366ece5f2f05055b509\": rpc error: code = NotFound desc = could not find container \"adb81cae56872a55c5781abe457653330ce2284251d64366ece5f2f05055b509\": container with ID starting with adb81cae56872a55c5781abe457653330ce2284251d64366ece5f2f05055b509 not found: ID does not exist" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.617523 4728 scope.go:117] "RemoveContainer" containerID="51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159" Dec 05 11:11:46 crc kubenswrapper[4728]: E1205 11:11:46.617673 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\": container with ID starting with 51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159 not found: ID does not exist" containerID="51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.617692 4728 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159"} err="failed to get container status \"51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\": rpc error: code = NotFound desc = could not find container \"51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\": container with ID starting with 51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159 not found: ID does not exist" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.617706 4728 scope.go:117] "RemoveContainer" containerID="dd145f7ac78fc008f9b9b24a11f51d6afc11e3cfa047b8d178d11507ddde8e77" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.617946 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd145f7ac78fc008f9b9b24a11f51d6afc11e3cfa047b8d178d11507ddde8e77"} err="failed to get container status \"dd145f7ac78fc008f9b9b24a11f51d6afc11e3cfa047b8d178d11507ddde8e77\": rpc error: code = NotFound desc = could not find container \"dd145f7ac78fc008f9b9b24a11f51d6afc11e3cfa047b8d178d11507ddde8e77\": container with ID starting with dd145f7ac78fc008f9b9b24a11f51d6afc11e3cfa047b8d178d11507ddde8e77 not found: ID does not exist" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.617971 4728 scope.go:117] "RemoveContainer" containerID="af8363ba45c19a07e7d11064c2e321ec28b939288d01eb82d8bd9e1ee6b93998" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.618176 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"af8363ba45c19a07e7d11064c2e321ec28b939288d01eb82d8bd9e1ee6b93998"} err="failed to get container status \"af8363ba45c19a07e7d11064c2e321ec28b939288d01eb82d8bd9e1ee6b93998\": rpc error: code = NotFound desc = could not find container \"af8363ba45c19a07e7d11064c2e321ec28b939288d01eb82d8bd9e1ee6b93998\": container with ID starting with af8363ba45c19a07e7d11064c2e321ec28b939288d01eb82d8bd9e1ee6b93998 not found: ID does not exist" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.618194 4728 scope.go:117] "RemoveContainer" containerID="d8ab41915fb3de0296bc2ba49fe39835620c1ed216790add23ccc2c23ac718c2" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.618343 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d8ab41915fb3de0296bc2ba49fe39835620c1ed216790add23ccc2c23ac718c2"} err="failed to get container status \"d8ab41915fb3de0296bc2ba49fe39835620c1ed216790add23ccc2c23ac718c2\": rpc error: code = NotFound desc = could not find container \"d8ab41915fb3de0296bc2ba49fe39835620c1ed216790add23ccc2c23ac718c2\": container with ID starting with d8ab41915fb3de0296bc2ba49fe39835620c1ed216790add23ccc2c23ac718c2 not found: ID does not exist" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.618357 4728 scope.go:117] "RemoveContainer" containerID="920da764fdc49312fdf906f31a1280028d5762b4b7af2b3806c9488e3dfa9d71" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.618500 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"920da764fdc49312fdf906f31a1280028d5762b4b7af2b3806c9488e3dfa9d71"} err="failed to get container status \"920da764fdc49312fdf906f31a1280028d5762b4b7af2b3806c9488e3dfa9d71\": rpc error: code = NotFound desc = could not find container \"920da764fdc49312fdf906f31a1280028d5762b4b7af2b3806c9488e3dfa9d71\": container with ID starting with 920da764fdc49312fdf906f31a1280028d5762b4b7af2b3806c9488e3dfa9d71 not found: ID does not exist" Dec 
05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.618514 4728 scope.go:117] "RemoveContainer" containerID="adb81cae56872a55c5781abe457653330ce2284251d64366ece5f2f05055b509" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.619148 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"adb81cae56872a55c5781abe457653330ce2284251d64366ece5f2f05055b509"} err="failed to get container status \"adb81cae56872a55c5781abe457653330ce2284251d64366ece5f2f05055b509\": rpc error: code = NotFound desc = could not find container \"adb81cae56872a55c5781abe457653330ce2284251d64366ece5f2f05055b509\": container with ID starting with adb81cae56872a55c5781abe457653330ce2284251d64366ece5f2f05055b509 not found: ID does not exist" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.619208 4728 scope.go:117] "RemoveContainer" containerID="51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.619522 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159"} err="failed to get container status \"51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\": rpc error: code = NotFound desc = could not find container \"51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159\": container with ID starting with 51514e9a85c0ba953ab01cf0b3ca593fef88b8bc7da0fff864ecc2a7f4df2159 not found: ID does not exist" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.619570 4728 scope.go:117] "RemoveContainer" containerID="c55d2540c68aeaf2690479a320d33f722587a33450b0dcd0ed1f3bcea22e9b9a" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.636361 4728 scope.go:117] "RemoveContainer" containerID="ca0ea91998ed906477f9a32850e226d57d7dfdf93a02680caef1829f745e7686" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.654881 4728 scope.go:117] "RemoveContainer" containerID="07fde169d36645af2ad129bc615ca4ecc6cb4a40f79a01e4c8b1a3c63fd41486" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.672315 4728 scope.go:117] "RemoveContainer" containerID="549d7e8a35ce1e67fd7d353cd12d1d38a2a6396fe71f2ef823d85afbf55ee90f" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.701215 4728 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/bc456c39-c3f9-4e33-974b-f0627ac7a228-var-lock\") on node \"crc\" DevicePath \"\"" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.702950 4728 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/bc456c39-c3f9-4e33-974b-f0627ac7a228-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 05 11:11:46 crc kubenswrapper[4728]: I1205 11:11:46.703077 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/bc456c39-c3f9-4e33-974b-f0627ac7a228-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 11:11:46 crc kubenswrapper[4728]: E1205 11:11:46.726327 4728 event.go:368] "Unable to write event (may retry after sleeping)" err="Patch \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/events/community-operators-6l2f8.187e4d4be296c433\": dial tcp 38.102.83.146:6443: connect: connection refused" event="&Event{ObjectMeta:{community-operators-6l2f8.187e4d4be296c433 openshift-marketplace 29332 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] 
[]},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-marketplace,Name:community-operators-6l2f8,UID:7489957c-ebb7-4902-9617-9a1287ccccb4,APIVersion:v1,ResourceVersion:28211,FieldPath:spec.containers{registry-server},},Reason:Unhealthy,Message:Readiness probe errored: rpc error: code = NotFound desc = container is not created or running: checking if PID of aa4a2c2832a98e895c63611ba804b3727ce7131281c9ed8969b1ffdc6a94ad80 is running failed: container process not found,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-05 11:11:32 +0000 UTC,LastTimestamp:2025-12-05 11:11:42.295778699 +0000 UTC m=+236.437901422,Count:2,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 05 11:11:47 crc kubenswrapper[4728]: E1205 11:11:47.249282 4728 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:47 crc kubenswrapper[4728]: E1205 11:11:47.250063 4728 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:47 crc kubenswrapper[4728]: E1205 11:11:47.250982 4728 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:47 crc kubenswrapper[4728]: E1205 11:11:47.251348 4728 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:47 crc kubenswrapper[4728]: E1205 11:11:47.251824 4728 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:47 crc kubenswrapper[4728]: I1205 11:11:47.251874 4728 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Dec 05 11:11:47 crc kubenswrapper[4728]: E1205 11:11:47.252245 4728 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.146:6443: connect: connection refused" interval="200ms" Dec 05 11:11:47 crc kubenswrapper[4728]: I1205 11:11:47.255590 4728 generic.go:334] "Generic (PLEG): container finished" podID="09c93128-1454-446f-bb75-771442084d74" containerID="b1832c94119ca4f8b4ebd72b2c33c0ba7f3dd2ae97f72fc912bc7a363c7dac8f" exitCode=0 Dec 05 11:11:47 crc kubenswrapper[4728]: I1205 11:11:47.255710 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mjf89" event={"ID":"09c93128-1454-446f-bb75-771442084d74","Type":"ContainerDied","Data":"b1832c94119ca4f8b4ebd72b2c33c0ba7f3dd2ae97f72fc912bc7a363c7dac8f"} Dec 05 11:11:47 crc kubenswrapper[4728]: I1205 11:11:47.256279 4728 status_manager.go:851] "Failed to get status for pod" podUID="d92fe6c3-10f5-4151-86cb-236a4c79463b" 
pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-x9m7l\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:47 crc kubenswrapper[4728]: I1205 11:11:47.256529 4728 status_manager.go:851] "Failed to get status for pod" podUID="10176a9a-24a6-4a05-a9a7-b91062c87c9b" pod="openshift-marketplace/certified-operators-w5msx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-w5msx\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:47 crc kubenswrapper[4728]: I1205 11:11:47.256820 4728 status_manager.go:851] "Failed to get status for pod" podUID="23bda114-880c-46d2-ba79-57a723a6b547" pod="openshift-marketplace/redhat-operators-qrl2q" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qrl2q\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:47 crc kubenswrapper[4728]: I1205 11:11:47.257286 4728 status_manager.go:851] "Failed to get status for pod" podUID="7489957c-ebb7-4902-9617-9a1287ccccb4" pod="openshift-marketplace/community-operators-6l2f8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-6l2f8\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:47 crc kubenswrapper[4728]: I1205 11:11:47.257815 4728 status_manager.go:851] "Failed to get status for pod" podUID="09c93128-1454-446f-bb75-771442084d74" pod="openshift-marketplace/redhat-marketplace-mjf89" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-mjf89\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:47 crc kubenswrapper[4728]: I1205 11:11:47.258195 4728 status_manager.go:851] "Failed to get status for pod" podUID="bc456c39-c3f9-4e33-974b-f0627ac7a228" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:47 crc kubenswrapper[4728]: I1205 11:11:47.262301 4728 generic.go:334] "Generic (PLEG): container finished" podID="09543444-057d-42b7-a103-6af978f7c627" containerID="5e5f6be9a5fe43f0818733b84d42c0d13f975093d961dcfc5dd449015e11a0ea" exitCode=0 Dec 05 11:11:47 crc kubenswrapper[4728]: I1205 11:11:47.262363 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5wcjm" event={"ID":"09543444-057d-42b7-a103-6af978f7c627","Type":"ContainerDied","Data":"5e5f6be9a5fe43f0818733b84d42c0d13f975093d961dcfc5dd449015e11a0ea"} Dec 05 11:11:47 crc kubenswrapper[4728]: I1205 11:11:47.263643 4728 status_manager.go:851] "Failed to get status for pod" podUID="23bda114-880c-46d2-ba79-57a723a6b547" pod="openshift-marketplace/redhat-operators-qrl2q" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qrl2q\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:47 crc kubenswrapper[4728]: I1205 11:11:47.263883 4728 status_manager.go:851] "Failed to get status for pod" podUID="7489957c-ebb7-4902-9617-9a1287ccccb4" pod="openshift-marketplace/community-operators-6l2f8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-6l2f8\": dial tcp 38.102.83.146:6443: 
connect: connection refused" Dec 05 11:11:47 crc kubenswrapper[4728]: I1205 11:11:47.264455 4728 status_manager.go:851] "Failed to get status for pod" podUID="09c93128-1454-446f-bb75-771442084d74" pod="openshift-marketplace/redhat-marketplace-mjf89" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-mjf89\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:47 crc kubenswrapper[4728]: I1205 11:11:47.264825 4728 status_manager.go:851] "Failed to get status for pod" podUID="bc456c39-c3f9-4e33-974b-f0627ac7a228" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:47 crc kubenswrapper[4728]: I1205 11:11:47.265306 4728 status_manager.go:851] "Failed to get status for pod" podUID="d92fe6c3-10f5-4151-86cb-236a4c79463b" pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-x9m7l\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:47 crc kubenswrapper[4728]: I1205 11:11:47.265757 4728 status_manager.go:851] "Failed to get status for pod" podUID="10176a9a-24a6-4a05-a9a7-b91062c87c9b" pod="openshift-marketplace/certified-operators-w5msx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-w5msx\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:47 crc kubenswrapper[4728]: I1205 11:11:47.266011 4728 status_manager.go:851] "Failed to get status for pod" podUID="09543444-057d-42b7-a103-6af978f7c627" pod="openshift-marketplace/redhat-marketplace-5wcjm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-5wcjm\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:47 crc kubenswrapper[4728]: I1205 11:11:47.268836 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"bc456c39-c3f9-4e33-974b-f0627ac7a228","Type":"ContainerDied","Data":"2dc4cdcdd64d34d6ee3a539e1f44db34d3672d22512617c5c97e93f3fa862d2c"} Dec 05 11:11:47 crc kubenswrapper[4728]: I1205 11:11:47.268864 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2dc4cdcdd64d34d6ee3a539e1f44db34d3672d22512617c5c97e93f3fa862d2c" Dec 05 11:11:47 crc kubenswrapper[4728]: I1205 11:11:47.268934 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 05 11:11:47 crc kubenswrapper[4728]: I1205 11:11:47.272312 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"d83b861e64e4a734e440702d4bac8a47bd7cf9650c472101383dbfb9d675e612"} Dec 05 11:11:47 crc kubenswrapper[4728]: E1205 11:11:47.273279 4728 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.146:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 11:11:47 crc kubenswrapper[4728]: I1205 11:11:47.273623 4728 status_manager.go:851] "Failed to get status for pod" podUID="09543444-057d-42b7-a103-6af978f7c627" pod="openshift-marketplace/redhat-marketplace-5wcjm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-5wcjm\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:47 crc kubenswrapper[4728]: I1205 11:11:47.274181 4728 status_manager.go:851] "Failed to get status for pod" podUID="23bda114-880c-46d2-ba79-57a723a6b547" pod="openshift-marketplace/redhat-operators-qrl2q" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qrl2q\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:47 crc kubenswrapper[4728]: I1205 11:11:47.274459 4728 status_manager.go:851] "Failed to get status for pod" podUID="7489957c-ebb7-4902-9617-9a1287ccccb4" pod="openshift-marketplace/community-operators-6l2f8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-6l2f8\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:47 crc kubenswrapper[4728]: I1205 11:11:47.274753 4728 status_manager.go:851] "Failed to get status for pod" podUID="09c93128-1454-446f-bb75-771442084d74" pod="openshift-marketplace/redhat-marketplace-mjf89" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-mjf89\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:47 crc kubenswrapper[4728]: I1205 11:11:47.275085 4728 status_manager.go:851] "Failed to get status for pod" podUID="29c0e6c0-78ea-4a37-aa57-22af46f50133" pod="openshift-marketplace/redhat-operators-tfbzt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-tfbzt\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:47 crc kubenswrapper[4728]: I1205 11:11:47.275387 4728 status_manager.go:851] "Failed to get status for pod" podUID="bc456c39-c3f9-4e33-974b-f0627ac7a228" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:47 crc kubenswrapper[4728]: I1205 11:11:47.275678 4728 status_manager.go:851] "Failed to get status for pod" podUID="d92fe6c3-10f5-4151-86cb-236a4c79463b" pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-x9m7l\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:47 crc 
kubenswrapper[4728]: I1205 11:11:47.276643 4728 status_manager.go:851] "Failed to get status for pod" podUID="10176a9a-24a6-4a05-a9a7-b91062c87c9b" pod="openshift-marketplace/certified-operators-w5msx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-w5msx\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:47 crc kubenswrapper[4728]: I1205 11:11:47.285687 4728 status_manager.go:851] "Failed to get status for pod" podUID="09543444-057d-42b7-a103-6af978f7c627" pod="openshift-marketplace/redhat-marketplace-5wcjm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-5wcjm\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:47 crc kubenswrapper[4728]: I1205 11:11:47.286096 4728 status_manager.go:851] "Failed to get status for pod" podUID="23bda114-880c-46d2-ba79-57a723a6b547" pod="openshift-marketplace/redhat-operators-qrl2q" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qrl2q\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:47 crc kubenswrapper[4728]: I1205 11:11:47.286420 4728 status_manager.go:851] "Failed to get status for pod" podUID="7489957c-ebb7-4902-9617-9a1287ccccb4" pod="openshift-marketplace/community-operators-6l2f8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-6l2f8\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:47 crc kubenswrapper[4728]: I1205 11:11:47.286994 4728 status_manager.go:851] "Failed to get status for pod" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/pods/machine-config-daemon-w8qlp\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:47 crc kubenswrapper[4728]: I1205 11:11:47.287441 4728 status_manager.go:851] "Failed to get status for pod" podUID="09c93128-1454-446f-bb75-771442084d74" pod="openshift-marketplace/redhat-marketplace-mjf89" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-mjf89\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:47 crc kubenswrapper[4728]: I1205 11:11:47.287668 4728 status_manager.go:851] "Failed to get status for pod" podUID="29c0e6c0-78ea-4a37-aa57-22af46f50133" pod="openshift-marketplace/redhat-operators-tfbzt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-tfbzt\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:47 crc kubenswrapper[4728]: I1205 11:11:47.287949 4728 status_manager.go:851] "Failed to get status for pod" podUID="bc456c39-c3f9-4e33-974b-f0627ac7a228" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:47 crc kubenswrapper[4728]: I1205 11:11:47.291787 4728 status_manager.go:851] "Failed to get status for pod" podUID="d92fe6c3-10f5-4151-86cb-236a4c79463b" pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-x9m7l\": dial tcp 38.102.83.146:6443: 
connect: connection refused" Dec 05 11:11:47 crc kubenswrapper[4728]: I1205 11:11:47.294043 4728 status_manager.go:851] "Failed to get status for pod" podUID="10176a9a-24a6-4a05-a9a7-b91062c87c9b" pod="openshift-marketplace/certified-operators-w5msx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-w5msx\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:47 crc kubenswrapper[4728]: I1205 11:11:47.294681 4728 status_manager.go:851] "Failed to get status for pod" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/pods/machine-config-daemon-w8qlp\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:47 crc kubenswrapper[4728]: I1205 11:11:47.295164 4728 status_manager.go:851] "Failed to get status for pod" podUID="09c93128-1454-446f-bb75-771442084d74" pod="openshift-marketplace/redhat-marketplace-mjf89" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-mjf89\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:47 crc kubenswrapper[4728]: I1205 11:11:47.295417 4728 status_manager.go:851] "Failed to get status for pod" podUID="29c0e6c0-78ea-4a37-aa57-22af46f50133" pod="openshift-marketplace/redhat-operators-tfbzt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-tfbzt\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:47 crc kubenswrapper[4728]: I1205 11:11:47.295696 4728 status_manager.go:851] "Failed to get status for pod" podUID="bc456c39-c3f9-4e33-974b-f0627ac7a228" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:47 crc kubenswrapper[4728]: I1205 11:11:47.295982 4728 status_manager.go:851] "Failed to get status for pod" podUID="d92fe6c3-10f5-4151-86cb-236a4c79463b" pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-x9m7l\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:47 crc kubenswrapper[4728]: I1205 11:11:47.296256 4728 status_manager.go:851] "Failed to get status for pod" podUID="10176a9a-24a6-4a05-a9a7-b91062c87c9b" pod="openshift-marketplace/certified-operators-w5msx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-w5msx\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:47 crc kubenswrapper[4728]: I1205 11:11:47.296610 4728 status_manager.go:851] "Failed to get status for pod" podUID="09543444-057d-42b7-a103-6af978f7c627" pod="openshift-marketplace/redhat-marketplace-5wcjm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-5wcjm\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:47 crc kubenswrapper[4728]: I1205 11:11:47.296893 4728 status_manager.go:851] "Failed to get status for pod" podUID="23bda114-880c-46d2-ba79-57a723a6b547" pod="openshift-marketplace/redhat-operators-qrl2q" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qrl2q\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:47 crc kubenswrapper[4728]: I1205 11:11:47.297940 4728 status_manager.go:851] "Failed to get status for pod" podUID="7489957c-ebb7-4902-9617-9a1287ccccb4" pod="openshift-marketplace/community-operators-6l2f8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-6l2f8\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:47 crc kubenswrapper[4728]: E1205 11:11:47.454613 4728 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.146:6443: connect: connection refused" interval="400ms" Dec 05 11:11:47 crc kubenswrapper[4728]: E1205 11:11:47.856712 4728 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.146:6443: connect: connection refused" interval="800ms" Dec 05 11:11:48 crc kubenswrapper[4728]: E1205 11:11:48.278202 4728 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.146:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 11:11:48 crc kubenswrapper[4728]: E1205 11:11:48.657977 4728 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.146:6443: connect: connection refused" interval="1.6s" Dec 05 11:11:49 crc kubenswrapper[4728]: I1205 11:11:49.286691 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mjf89" event={"ID":"09c93128-1454-446f-bb75-771442084d74","Type":"ContainerStarted","Data":"1ce3204e059fc718ca7e684da3b0529b22391a3c9958436f86ebb4f4f01d8cda"} Dec 05 11:11:49 crc kubenswrapper[4728]: I1205 11:11:49.287692 4728 status_manager.go:851] "Failed to get status for pod" podUID="23bda114-880c-46d2-ba79-57a723a6b547" pod="openshift-marketplace/redhat-operators-qrl2q" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qrl2q\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:49 crc kubenswrapper[4728]: I1205 11:11:49.288110 4728 status_manager.go:851] "Failed to get status for pod" podUID="7489957c-ebb7-4902-9617-9a1287ccccb4" pod="openshift-marketplace/community-operators-6l2f8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-6l2f8\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:49 crc kubenswrapper[4728]: I1205 11:11:49.288616 4728 status_manager.go:851] "Failed to get status for pod" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/pods/machine-config-daemon-w8qlp\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:49 crc kubenswrapper[4728]: I1205 11:11:49.289163 4728 status_manager.go:851] "Failed to get status for pod" 
podUID="09c93128-1454-446f-bb75-771442084d74" pod="openshift-marketplace/redhat-marketplace-mjf89" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-mjf89\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:49 crc kubenswrapper[4728]: I1205 11:11:49.289652 4728 status_manager.go:851] "Failed to get status for pod" podUID="29c0e6c0-78ea-4a37-aa57-22af46f50133" pod="openshift-marketplace/redhat-operators-tfbzt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-tfbzt\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:49 crc kubenswrapper[4728]: I1205 11:11:49.290009 4728 status_manager.go:851] "Failed to get status for pod" podUID="bc456c39-c3f9-4e33-974b-f0627ac7a228" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:49 crc kubenswrapper[4728]: I1205 11:11:49.290372 4728 status_manager.go:851] "Failed to get status for pod" podUID="d92fe6c3-10f5-4151-86cb-236a4c79463b" pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-x9m7l\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:49 crc kubenswrapper[4728]: I1205 11:11:49.290736 4728 status_manager.go:851] "Failed to get status for pod" podUID="10176a9a-24a6-4a05-a9a7-b91062c87c9b" pod="openshift-marketplace/certified-operators-w5msx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-w5msx\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:49 crc kubenswrapper[4728]: I1205 11:11:49.291112 4728 status_manager.go:851] "Failed to get status for pod" podUID="09543444-057d-42b7-a103-6af978f7c627" pod="openshift-marketplace/redhat-marketplace-5wcjm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-5wcjm\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:50 crc kubenswrapper[4728]: E1205 11:11:50.259760 4728 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.146:6443: connect: connection refused" interval="3.2s" Dec 05 11:11:50 crc kubenswrapper[4728]: I1205 11:11:50.295808 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5wcjm" event={"ID":"09543444-057d-42b7-a103-6af978f7c627","Type":"ContainerStarted","Data":"6d54d6c6ae7f7130e883d94a95f02f2d4cfefdd8b05e5351294b5a58ef70afba"} Dec 05 11:11:50 crc kubenswrapper[4728]: I1205 11:11:50.295922 4728 status_manager.go:851] "Failed to get status for pod" podUID="29c0e6c0-78ea-4a37-aa57-22af46f50133" pod="openshift-marketplace/redhat-operators-tfbzt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-tfbzt\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:50 crc kubenswrapper[4728]: I1205 11:11:50.296168 4728 status_manager.go:851] "Failed to get status for pod" podUID="bc456c39-c3f9-4e33-974b-f0627ac7a228" pod="openshift-kube-apiserver/installer-9-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:50 crc kubenswrapper[4728]: I1205 11:11:50.296384 4728 status_manager.go:851] "Failed to get status for pod" podUID="d92fe6c3-10f5-4151-86cb-236a4c79463b" pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-x9m7l\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:50 crc kubenswrapper[4728]: I1205 11:11:50.296660 4728 status_manager.go:851] "Failed to get status for pod" podUID="10176a9a-24a6-4a05-a9a7-b91062c87c9b" pod="openshift-marketplace/certified-operators-w5msx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-w5msx\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:50 crc kubenswrapper[4728]: I1205 11:11:50.297323 4728 status_manager.go:851] "Failed to get status for pod" podUID="09543444-057d-42b7-a103-6af978f7c627" pod="openshift-marketplace/redhat-marketplace-5wcjm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-5wcjm\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:50 crc kubenswrapper[4728]: I1205 11:11:50.297560 4728 status_manager.go:851] "Failed to get status for pod" podUID="23bda114-880c-46d2-ba79-57a723a6b547" pod="openshift-marketplace/redhat-operators-qrl2q" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qrl2q\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:50 crc kubenswrapper[4728]: I1205 11:11:50.297824 4728 status_manager.go:851] "Failed to get status for pod" podUID="7489957c-ebb7-4902-9617-9a1287ccccb4" pod="openshift-marketplace/community-operators-6l2f8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-6l2f8\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:50 crc kubenswrapper[4728]: I1205 11:11:50.298084 4728 status_manager.go:851] "Failed to get status for pod" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/pods/machine-config-daemon-w8qlp\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:50 crc kubenswrapper[4728]: I1205 11:11:50.298379 4728 status_manager.go:851] "Failed to get status for pod" podUID="09c93128-1454-446f-bb75-771442084d74" pod="openshift-marketplace/redhat-marketplace-mjf89" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-mjf89\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:50 crc kubenswrapper[4728]: E1205 11:11:50.450266 4728 desired_state_of_world_populator.go:312] "Error processing volume" err="error processing PVC openshift-image-registry/crc-image-registry-storage: failed to fetch PVC from API server: Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/persistentvolumeclaims/crc-image-registry-storage\": dial tcp 38.102.83.146:6443: connect: connection refused" pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" volumeName="registry-storage" Dec 05 11:11:53 crc 
kubenswrapper[4728]: E1205 11:11:53.461354 4728 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.146:6443: connect: connection refused" interval="6.4s" Dec 05 11:11:53 crc kubenswrapper[4728]: E1205 11:11:53.934960 4728 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:11:53Z\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:11:53Z\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:11:53Z\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T11:11:53Z\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:15adb3b2133604b064893f8009a74145e4c8bb5b134d111346dcccbdd2aa9bc2\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:164fc35a19aa6cc886c8015c8ee3eba4895e76b1152cb9d795e4f3154a8533a3\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1610512706},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:610b8d322265b2c9d6b07efb2be26bf4d91e428b46412d73f5bdae0218004794\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:eafb9c83c480396c34e85d1f5f5c2623be6305031245be36455850c0398bfcc7\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1209064267},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:0029526507396e493c5dce1652c41ed9c239b29e84ee579a2735fdb1aa3bce83\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:e1d263cd2113e0727021ccf27c8a671f8cfeaefbf93d60e3a918d6f60c136c30\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1201604946},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[],\\\"sizeBytes\\\":1129027903},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\
",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\
\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792}],\\\"runtimeHandlers\\\
":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Patch \"https://api-int.crc.testing:6443/api/v1/nodes/crc/status?timeout=10s\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:53 crc kubenswrapper[4728]: E1205 11:11:53.935430 4728 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:53 crc kubenswrapper[4728]: E1205 11:11:53.935973 4728 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:53 crc kubenswrapper[4728]: E1205 11:11:53.936271 4728 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:53 crc kubenswrapper[4728]: E1205 11:11:53.936766 4728 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:53 crc kubenswrapper[4728]: E1205 11:11:53.936839 4728 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 11:11:54 crc kubenswrapper[4728]: I1205 11:11:54.046248 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-mjf89" Dec 05 11:11:54 crc kubenswrapper[4728]: I1205 11:11:54.046301 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-mjf89" Dec 05 11:11:54 crc kubenswrapper[4728]: I1205 11:11:54.089766 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-mjf89" Dec 05 11:11:54 crc kubenswrapper[4728]: I1205 11:11:54.090518 4728 status_manager.go:851] "Failed to get status for pod" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/pods/machine-config-daemon-w8qlp\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:54 crc kubenswrapper[4728]: I1205 11:11:54.090832 4728 status_manager.go:851] "Failed to get status for pod" podUID="09c93128-1454-446f-bb75-771442084d74" pod="openshift-marketplace/redhat-marketplace-mjf89" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-mjf89\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:54 crc kubenswrapper[4728]: I1205 11:11:54.091196 4728 status_manager.go:851] "Failed to get status for pod" podUID="29c0e6c0-78ea-4a37-aa57-22af46f50133" pod="openshift-marketplace/redhat-operators-tfbzt" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-tfbzt\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:54 crc kubenswrapper[4728]: I1205 11:11:54.091435 4728 status_manager.go:851] "Failed to get status for pod" podUID="bc456c39-c3f9-4e33-974b-f0627ac7a228" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:54 crc kubenswrapper[4728]: I1205 11:11:54.091874 4728 status_manager.go:851] "Failed to get status for pod" podUID="d92fe6c3-10f5-4151-86cb-236a4c79463b" pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-x9m7l\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:54 crc kubenswrapper[4728]: I1205 11:11:54.092409 4728 status_manager.go:851] "Failed to get status for pod" podUID="10176a9a-24a6-4a05-a9a7-b91062c87c9b" pod="openshift-marketplace/certified-operators-w5msx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-w5msx\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:54 crc kubenswrapper[4728]: I1205 11:11:54.092714 4728 status_manager.go:851] "Failed to get status for pod" podUID="09543444-057d-42b7-a103-6af978f7c627" pod="openshift-marketplace/redhat-marketplace-5wcjm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-5wcjm\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:54 crc kubenswrapper[4728]: I1205 11:11:54.093012 4728 status_manager.go:851] "Failed to get status for pod" podUID="23bda114-880c-46d2-ba79-57a723a6b547" pod="openshift-marketplace/redhat-operators-qrl2q" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qrl2q\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:54 crc kubenswrapper[4728]: I1205 11:11:54.093328 4728 status_manager.go:851] "Failed to get status for pod" podUID="7489957c-ebb7-4902-9617-9a1287ccccb4" pod="openshift-marketplace/community-operators-6l2f8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-6l2f8\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:54 crc kubenswrapper[4728]: I1205 11:11:54.365667 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-mjf89" Dec 05 11:11:54 crc kubenswrapper[4728]: I1205 11:11:54.366415 4728 status_manager.go:851] "Failed to get status for pod" podUID="bc456c39-c3f9-4e33-974b-f0627ac7a228" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:54 crc kubenswrapper[4728]: I1205 11:11:54.367125 4728 status_manager.go:851] "Failed to get status for pod" podUID="29c0e6c0-78ea-4a37-aa57-22af46f50133" pod="openshift-marketplace/redhat-operators-tfbzt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-tfbzt\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:54 crc kubenswrapper[4728]: 
I1205 11:11:54.367681 4728 status_manager.go:851] "Failed to get status for pod" podUID="d92fe6c3-10f5-4151-86cb-236a4c79463b" pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-x9m7l\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:54 crc kubenswrapper[4728]: I1205 11:11:54.368214 4728 status_manager.go:851] "Failed to get status for pod" podUID="10176a9a-24a6-4a05-a9a7-b91062c87c9b" pod="openshift-marketplace/certified-operators-w5msx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-w5msx\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:54 crc kubenswrapper[4728]: I1205 11:11:54.368763 4728 status_manager.go:851] "Failed to get status for pod" podUID="09543444-057d-42b7-a103-6af978f7c627" pod="openshift-marketplace/redhat-marketplace-5wcjm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-5wcjm\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:54 crc kubenswrapper[4728]: I1205 11:11:54.369246 4728 status_manager.go:851] "Failed to get status for pod" podUID="23bda114-880c-46d2-ba79-57a723a6b547" pod="openshift-marketplace/redhat-operators-qrl2q" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qrl2q\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:54 crc kubenswrapper[4728]: I1205 11:11:54.369663 4728 status_manager.go:851] "Failed to get status for pod" podUID="7489957c-ebb7-4902-9617-9a1287ccccb4" pod="openshift-marketplace/community-operators-6l2f8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-6l2f8\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:54 crc kubenswrapper[4728]: I1205 11:11:54.370231 4728 status_manager.go:851] "Failed to get status for pod" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/pods/machine-config-daemon-w8qlp\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:54 crc kubenswrapper[4728]: I1205 11:11:54.370673 4728 status_manager.go:851] "Failed to get status for pod" podUID="09c93128-1454-446f-bb75-771442084d74" pod="openshift-marketplace/redhat-marketplace-mjf89" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-mjf89\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:54 crc kubenswrapper[4728]: I1205 11:11:54.470833 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-5wcjm" Dec 05 11:11:54 crc kubenswrapper[4728]: I1205 11:11:54.470898 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-5wcjm" Dec 05 11:11:54 crc kubenswrapper[4728]: I1205 11:11:54.532027 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-5wcjm" Dec 05 11:11:54 crc kubenswrapper[4728]: I1205 11:11:54.532576 4728 status_manager.go:851] "Failed to get status for pod" podUID="10176a9a-24a6-4a05-a9a7-b91062c87c9b" 
pod="openshift-marketplace/certified-operators-w5msx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-w5msx\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:54 crc kubenswrapper[4728]: I1205 11:11:54.532866 4728 status_manager.go:851] "Failed to get status for pod" podUID="09543444-057d-42b7-a103-6af978f7c627" pod="openshift-marketplace/redhat-marketplace-5wcjm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-5wcjm\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:54 crc kubenswrapper[4728]: I1205 11:11:54.533145 4728 status_manager.go:851] "Failed to get status for pod" podUID="23bda114-880c-46d2-ba79-57a723a6b547" pod="openshift-marketplace/redhat-operators-qrl2q" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qrl2q\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:54 crc kubenswrapper[4728]: I1205 11:11:54.533425 4728 status_manager.go:851] "Failed to get status for pod" podUID="7489957c-ebb7-4902-9617-9a1287ccccb4" pod="openshift-marketplace/community-operators-6l2f8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-6l2f8\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:54 crc kubenswrapper[4728]: I1205 11:11:54.533685 4728 status_manager.go:851] "Failed to get status for pod" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/pods/machine-config-daemon-w8qlp\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:54 crc kubenswrapper[4728]: I1205 11:11:54.533944 4728 status_manager.go:851] "Failed to get status for pod" podUID="09c93128-1454-446f-bb75-771442084d74" pod="openshift-marketplace/redhat-marketplace-mjf89" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-mjf89\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:54 crc kubenswrapper[4728]: I1205 11:11:54.534158 4728 status_manager.go:851] "Failed to get status for pod" podUID="29c0e6c0-78ea-4a37-aa57-22af46f50133" pod="openshift-marketplace/redhat-operators-tfbzt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-tfbzt\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:54 crc kubenswrapper[4728]: I1205 11:11:54.534359 4728 status_manager.go:851] "Failed to get status for pod" podUID="bc456c39-c3f9-4e33-974b-f0627ac7a228" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:54 crc kubenswrapper[4728]: I1205 11:11:54.534557 4728 status_manager.go:851] "Failed to get status for pod" podUID="d92fe6c3-10f5-4151-86cb-236a4c79463b" pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-x9m7l\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:55 crc kubenswrapper[4728]: I1205 11:11:55.075690 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/redhat-operators-tfbzt" Dec 05 11:11:55 crc kubenswrapper[4728]: I1205 11:11:55.076224 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-tfbzt" Dec 05 11:11:55 crc kubenswrapper[4728]: I1205 11:11:55.136009 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-tfbzt" Dec 05 11:11:55 crc kubenswrapper[4728]: I1205 11:11:55.136400 4728 status_manager.go:851] "Failed to get status for pod" podUID="bc456c39-c3f9-4e33-974b-f0627ac7a228" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:55 crc kubenswrapper[4728]: I1205 11:11:55.136566 4728 status_manager.go:851] "Failed to get status for pod" podUID="29c0e6c0-78ea-4a37-aa57-22af46f50133" pod="openshift-marketplace/redhat-operators-tfbzt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-tfbzt\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:55 crc kubenswrapper[4728]: I1205 11:11:55.136701 4728 status_manager.go:851] "Failed to get status for pod" podUID="d92fe6c3-10f5-4151-86cb-236a4c79463b" pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-x9m7l\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:55 crc kubenswrapper[4728]: I1205 11:11:55.136874 4728 status_manager.go:851] "Failed to get status for pod" podUID="10176a9a-24a6-4a05-a9a7-b91062c87c9b" pod="openshift-marketplace/certified-operators-w5msx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-w5msx\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:55 crc kubenswrapper[4728]: I1205 11:11:55.137083 4728 status_manager.go:851] "Failed to get status for pod" podUID="09543444-057d-42b7-a103-6af978f7c627" pod="openshift-marketplace/redhat-marketplace-5wcjm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-5wcjm\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:55 crc kubenswrapper[4728]: I1205 11:11:55.137292 4728 status_manager.go:851] "Failed to get status for pod" podUID="23bda114-880c-46d2-ba79-57a723a6b547" pod="openshift-marketplace/redhat-operators-qrl2q" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qrl2q\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:55 crc kubenswrapper[4728]: I1205 11:11:55.137578 4728 status_manager.go:851] "Failed to get status for pod" podUID="7489957c-ebb7-4902-9617-9a1287ccccb4" pod="openshift-marketplace/community-operators-6l2f8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-6l2f8\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:55 crc kubenswrapper[4728]: I1205 11:11:55.137929 4728 status_manager.go:851] "Failed to get status for pod" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/pods/machine-config-daemon-w8qlp\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:55 crc kubenswrapper[4728]: I1205 11:11:55.138128 4728 status_manager.go:851] "Failed to get status for pod" podUID="09c93128-1454-446f-bb75-771442084d74" pod="openshift-marketplace/redhat-marketplace-mjf89" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-mjf89\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:55 crc kubenswrapper[4728]: I1205 11:11:55.385249 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-5wcjm" Dec 05 11:11:55 crc kubenswrapper[4728]: I1205 11:11:55.386036 4728 status_manager.go:851] "Failed to get status for pod" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/pods/machine-config-daemon-w8qlp\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:55 crc kubenswrapper[4728]: I1205 11:11:55.386834 4728 status_manager.go:851] "Failed to get status for pod" podUID="09c93128-1454-446f-bb75-771442084d74" pod="openshift-marketplace/redhat-marketplace-mjf89" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-mjf89\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:55 crc kubenswrapper[4728]: I1205 11:11:55.387743 4728 status_manager.go:851] "Failed to get status for pod" podUID="29c0e6c0-78ea-4a37-aa57-22af46f50133" pod="openshift-marketplace/redhat-operators-tfbzt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-tfbzt\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:55 crc kubenswrapper[4728]: I1205 11:11:55.388202 4728 status_manager.go:851] "Failed to get status for pod" podUID="bc456c39-c3f9-4e33-974b-f0627ac7a228" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:55 crc kubenswrapper[4728]: I1205 11:11:55.388654 4728 status_manager.go:851] "Failed to get status for pod" podUID="d92fe6c3-10f5-4151-86cb-236a4c79463b" pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-x9m7l\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:55 crc kubenswrapper[4728]: I1205 11:11:55.388990 4728 status_manager.go:851] "Failed to get status for pod" podUID="10176a9a-24a6-4a05-a9a7-b91062c87c9b" pod="openshift-marketplace/certified-operators-w5msx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-w5msx\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:55 crc kubenswrapper[4728]: I1205 11:11:55.389391 4728 status_manager.go:851] "Failed to get status for pod" podUID="09543444-057d-42b7-a103-6af978f7c627" pod="openshift-marketplace/redhat-marketplace-5wcjm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-5wcjm\": dial tcp 38.102.83.146:6443: connect: 
connection refused" Dec 05 11:11:55 crc kubenswrapper[4728]: I1205 11:11:55.389694 4728 status_manager.go:851] "Failed to get status for pod" podUID="23bda114-880c-46d2-ba79-57a723a6b547" pod="openshift-marketplace/redhat-operators-qrl2q" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qrl2q\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:55 crc kubenswrapper[4728]: I1205 11:11:55.390119 4728 status_manager.go:851] "Failed to get status for pod" podUID="7489957c-ebb7-4902-9617-9a1287ccccb4" pod="openshift-marketplace/community-operators-6l2f8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-6l2f8\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:55 crc kubenswrapper[4728]: I1205 11:11:55.391785 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-tfbzt" Dec 05 11:11:55 crc kubenswrapper[4728]: I1205 11:11:55.392174 4728 status_manager.go:851] "Failed to get status for pod" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/pods/machine-config-daemon-w8qlp\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:55 crc kubenswrapper[4728]: I1205 11:11:55.392618 4728 status_manager.go:851] "Failed to get status for pod" podUID="09c93128-1454-446f-bb75-771442084d74" pod="openshift-marketplace/redhat-marketplace-mjf89" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-mjf89\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:55 crc kubenswrapper[4728]: I1205 11:11:55.393156 4728 status_manager.go:851] "Failed to get status for pod" podUID="29c0e6c0-78ea-4a37-aa57-22af46f50133" pod="openshift-marketplace/redhat-operators-tfbzt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-tfbzt\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:55 crc kubenswrapper[4728]: I1205 11:11:55.393617 4728 status_manager.go:851] "Failed to get status for pod" podUID="bc456c39-c3f9-4e33-974b-f0627ac7a228" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:55 crc kubenswrapper[4728]: I1205 11:11:55.394130 4728 status_manager.go:851] "Failed to get status for pod" podUID="d92fe6c3-10f5-4151-86cb-236a4c79463b" pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-x9m7l\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:55 crc kubenswrapper[4728]: I1205 11:11:55.394633 4728 status_manager.go:851] "Failed to get status for pod" podUID="10176a9a-24a6-4a05-a9a7-b91062c87c9b" pod="openshift-marketplace/certified-operators-w5msx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-w5msx\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:55 crc kubenswrapper[4728]: I1205 11:11:55.395131 4728 status_manager.go:851] "Failed to get status for pod" 
podUID="09543444-057d-42b7-a103-6af978f7c627" pod="openshift-marketplace/redhat-marketplace-5wcjm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-5wcjm\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:55 crc kubenswrapper[4728]: I1205 11:11:55.395447 4728 status_manager.go:851] "Failed to get status for pod" podUID="23bda114-880c-46d2-ba79-57a723a6b547" pod="openshift-marketplace/redhat-operators-qrl2q" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qrl2q\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:55 crc kubenswrapper[4728]: I1205 11:11:55.395992 4728 status_manager.go:851] "Failed to get status for pod" podUID="7489957c-ebb7-4902-9617-9a1287ccccb4" pod="openshift-marketplace/community-operators-6l2f8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-6l2f8\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:56 crc kubenswrapper[4728]: I1205 11:11:56.330314 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 05 11:11:56 crc kubenswrapper[4728]: I1205 11:11:56.330910 4728 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="e8ca6321022609b3cd60a9e1a534cfd3e69d3f520e5252ed4ffa8f76be4af91e" exitCode=1 Dec 05 11:11:56 crc kubenswrapper[4728]: I1205 11:11:56.331022 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"e8ca6321022609b3cd60a9e1a534cfd3e69d3f520e5252ed4ffa8f76be4af91e"} Dec 05 11:11:56 crc kubenswrapper[4728]: I1205 11:11:56.331667 4728 scope.go:117] "RemoveContainer" containerID="e8ca6321022609b3cd60a9e1a534cfd3e69d3f520e5252ed4ffa8f76be4af91e" Dec 05 11:11:56 crc kubenswrapper[4728]: I1205 11:11:56.332226 4728 status_manager.go:851] "Failed to get status for pod" podUID="23bda114-880c-46d2-ba79-57a723a6b547" pod="openshift-marketplace/redhat-operators-qrl2q" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qrl2q\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:56 crc kubenswrapper[4728]: I1205 11:11:56.332525 4728 status_manager.go:851] "Failed to get status for pod" podUID="7489957c-ebb7-4902-9617-9a1287ccccb4" pod="openshift-marketplace/community-operators-6l2f8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-6l2f8\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:56 crc kubenswrapper[4728]: I1205 11:11:56.332883 4728 status_manager.go:851] "Failed to get status for pod" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/pods/machine-config-daemon-w8qlp\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:56 crc kubenswrapper[4728]: I1205 11:11:56.333671 4728 status_manager.go:851] "Failed to get status for pod" podUID="09c93128-1454-446f-bb75-771442084d74" pod="openshift-marketplace/redhat-marketplace-mjf89" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-mjf89\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:56 crc kubenswrapper[4728]: I1205 11:11:56.333915 4728 status_manager.go:851] "Failed to get status for pod" podUID="29c0e6c0-78ea-4a37-aa57-22af46f50133" pod="openshift-marketplace/redhat-operators-tfbzt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-tfbzt\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:56 crc kubenswrapper[4728]: I1205 11:11:56.334239 4728 status_manager.go:851] "Failed to get status for pod" podUID="bc456c39-c3f9-4e33-974b-f0627ac7a228" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:56 crc kubenswrapper[4728]: I1205 11:11:56.334476 4728 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:56 crc kubenswrapper[4728]: I1205 11:11:56.334621 4728 status_manager.go:851] "Failed to get status for pod" podUID="d92fe6c3-10f5-4151-86cb-236a4c79463b" pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-x9m7l\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:56 crc kubenswrapper[4728]: I1205 11:11:56.334765 4728 status_manager.go:851] "Failed to get status for pod" podUID="10176a9a-24a6-4a05-a9a7-b91062c87c9b" pod="openshift-marketplace/certified-operators-w5msx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-w5msx\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:56 crc kubenswrapper[4728]: I1205 11:11:56.334922 4728 status_manager.go:851] "Failed to get status for pod" podUID="09543444-057d-42b7-a103-6af978f7c627" pod="openshift-marketplace/redhat-marketplace-5wcjm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-5wcjm\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:56 crc kubenswrapper[4728]: I1205 11:11:56.352058 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 11:11:56 crc kubenswrapper[4728]: I1205 11:11:56.354751 4728 status_manager.go:851] "Failed to get status for pod" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/pods/machine-config-daemon-w8qlp\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:56 crc kubenswrapper[4728]: I1205 11:11:56.355129 4728 status_manager.go:851] "Failed to get status for pod" podUID="09c93128-1454-446f-bb75-771442084d74" pod="openshift-marketplace/redhat-marketplace-mjf89" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-mjf89\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:56 crc kubenswrapper[4728]: I1205 11:11:56.355629 4728 status_manager.go:851] "Failed to get status for pod" podUID="29c0e6c0-78ea-4a37-aa57-22af46f50133" pod="openshift-marketplace/redhat-operators-tfbzt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-tfbzt\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:56 crc kubenswrapper[4728]: I1205 11:11:56.356044 4728 status_manager.go:851] "Failed to get status for pod" podUID="bc456c39-c3f9-4e33-974b-f0627ac7a228" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:56 crc kubenswrapper[4728]: I1205 11:11:56.356884 4728 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:56 crc kubenswrapper[4728]: I1205 11:11:56.357166 4728 status_manager.go:851] "Failed to get status for pod" podUID="d92fe6c3-10f5-4151-86cb-236a4c79463b" pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-x9m7l\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:56 crc kubenswrapper[4728]: I1205 11:11:56.357430 4728 status_manager.go:851] "Failed to get status for pod" podUID="10176a9a-24a6-4a05-a9a7-b91062c87c9b" pod="openshift-marketplace/certified-operators-w5msx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-w5msx\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:56 crc kubenswrapper[4728]: I1205 11:11:56.357908 4728 status_manager.go:851] "Failed to get status for pod" podUID="09543444-057d-42b7-a103-6af978f7c627" pod="openshift-marketplace/redhat-marketplace-5wcjm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-5wcjm\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:56 crc kubenswrapper[4728]: I1205 11:11:56.358723 4728 status_manager.go:851] "Failed to get status for pod" podUID="23bda114-880c-46d2-ba79-57a723a6b547" pod="openshift-marketplace/redhat-operators-qrl2q" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qrl2q\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:56 crc kubenswrapper[4728]: I1205 11:11:56.359466 4728 status_manager.go:851] "Failed to get status for pod" podUID="7489957c-ebb7-4902-9617-9a1287ccccb4" pod="openshift-marketplace/community-operators-6l2f8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-6l2f8\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:56 crc kubenswrapper[4728]: I1205 11:11:56.361965 4728 status_manager.go:851] "Failed to get status for pod" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/pods/machine-config-daemon-w8qlp\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:56 crc kubenswrapper[4728]: I1205 11:11:56.362338 4728 status_manager.go:851] "Failed to get status for pod" podUID="09c93128-1454-446f-bb75-771442084d74" pod="openshift-marketplace/redhat-marketplace-mjf89" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-mjf89\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:56 crc kubenswrapper[4728]: I1205 11:11:56.362709 4728 status_manager.go:851] "Failed to get status for pod" podUID="29c0e6c0-78ea-4a37-aa57-22af46f50133" pod="openshift-marketplace/redhat-operators-tfbzt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-tfbzt\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:56 crc kubenswrapper[4728]: I1205 11:11:56.363136 4728 status_manager.go:851] "Failed to get status for pod" podUID="bc456c39-c3f9-4e33-974b-f0627ac7a228" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:56 crc kubenswrapper[4728]: I1205 11:11:56.363459 4728 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:56 crc kubenswrapper[4728]: I1205 11:11:56.364325 4728 status_manager.go:851] "Failed to get status for pod" podUID="d92fe6c3-10f5-4151-86cb-236a4c79463b" pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-x9m7l\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:56 crc kubenswrapper[4728]: I1205 11:11:56.365550 4728 status_manager.go:851] "Failed to get status for pod" podUID="10176a9a-24a6-4a05-a9a7-b91062c87c9b" pod="openshift-marketplace/certified-operators-w5msx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-w5msx\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:56 crc kubenswrapper[4728]: I1205 11:11:56.365913 4728 status_manager.go:851] "Failed to get status for pod" 
podUID="09543444-057d-42b7-a103-6af978f7c627" pod="openshift-marketplace/redhat-marketplace-5wcjm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-5wcjm\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:56 crc kubenswrapper[4728]: I1205 11:11:56.367469 4728 status_manager.go:851] "Failed to get status for pod" podUID="23bda114-880c-46d2-ba79-57a723a6b547" pod="openshift-marketplace/redhat-operators-qrl2q" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qrl2q\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:56 crc kubenswrapper[4728]: I1205 11:11:56.367839 4728 status_manager.go:851] "Failed to get status for pod" podUID="7489957c-ebb7-4902-9617-9a1287ccccb4" pod="openshift-marketplace/community-operators-6l2f8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-6l2f8\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:56 crc kubenswrapper[4728]: I1205 11:11:56.386914 4728 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="e6b1dbb0-8a99-4b3b-870e-771cdaac1bac" Dec 05 11:11:56 crc kubenswrapper[4728]: I1205 11:11:56.386955 4728 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="e6b1dbb0-8a99-4b3b-870e-771cdaac1bac" Dec 05 11:11:56 crc kubenswrapper[4728]: E1205 11:11:56.387530 4728 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.146:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 11:11:56 crc kubenswrapper[4728]: I1205 11:11:56.388209 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 11:11:56 crc kubenswrapper[4728]: E1205 11:11:56.727473 4728 event.go:368] "Unable to write event (may retry after sleeping)" err="Patch \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/events/community-operators-6l2f8.187e4d4be296c433\": dial tcp 38.102.83.146:6443: connect: connection refused" event="&Event{ObjectMeta:{community-operators-6l2f8.187e4d4be296c433 openshift-marketplace 29332 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-marketplace,Name:community-operators-6l2f8,UID:7489957c-ebb7-4902-9617-9a1287ccccb4,APIVersion:v1,ResourceVersion:28211,FieldPath:spec.containers{registry-server},},Reason:Unhealthy,Message:Readiness probe errored: rpc error: code = NotFound desc = container is not created or running: checking if PID of aa4a2c2832a98e895c63611ba804b3727ce7131281c9ed8969b1ffdc6a94ad80 is running failed: container process not found,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-05 11:11:32 +0000 UTC,LastTimestamp:2025-12-05 11:11:42.295778699 +0000 UTC m=+236.437901422,Count:2,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 05 11:11:57 crc kubenswrapper[4728]: I1205 11:11:57.342440 4728 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="72690e64b22afa2c9a367bd36935216c3370c11eb3d77c41ab99604ec4f970e6" exitCode=0 Dec 05 11:11:57 crc kubenswrapper[4728]: I1205 11:11:57.342539 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"72690e64b22afa2c9a367bd36935216c3370c11eb3d77c41ab99604ec4f970e6"} Dec 05 11:11:57 crc kubenswrapper[4728]: I1205 11:11:57.342578 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"95f543b6eb3c2a28aacaa175ade8feafcff85a77f76255c8ab5ba6c814923c0f"} Dec 05 11:11:57 crc kubenswrapper[4728]: I1205 11:11:57.342974 4728 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="e6b1dbb0-8a99-4b3b-870e-771cdaac1bac" Dec 05 11:11:57 crc kubenswrapper[4728]: I1205 11:11:57.343002 4728 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="e6b1dbb0-8a99-4b3b-870e-771cdaac1bac" Dec 05 11:11:57 crc kubenswrapper[4728]: E1205 11:11:57.343758 4728 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.146:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 11:11:57 crc kubenswrapper[4728]: I1205 11:11:57.343874 4728 status_manager.go:851] "Failed to get status for pod" podUID="23bda114-880c-46d2-ba79-57a723a6b547" pod="openshift-marketplace/redhat-operators-qrl2q" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qrl2q\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:57 crc kubenswrapper[4728]: I1205 11:11:57.344067 4728 status_manager.go:851] "Failed to get status for pod" podUID="7489957c-ebb7-4902-9617-9a1287ccccb4" 
pod="openshift-marketplace/community-operators-6l2f8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-6l2f8\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:57 crc kubenswrapper[4728]: I1205 11:11:57.345009 4728 status_manager.go:851] "Failed to get status for pod" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/pods/machine-config-daemon-w8qlp\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:57 crc kubenswrapper[4728]: I1205 11:11:57.345523 4728 status_manager.go:851] "Failed to get status for pod" podUID="09c93128-1454-446f-bb75-771442084d74" pod="openshift-marketplace/redhat-marketplace-mjf89" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-mjf89\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:57 crc kubenswrapper[4728]: I1205 11:11:57.346181 4728 status_manager.go:851] "Failed to get status for pod" podUID="29c0e6c0-78ea-4a37-aa57-22af46f50133" pod="openshift-marketplace/redhat-operators-tfbzt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-tfbzt\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:57 crc kubenswrapper[4728]: I1205 11:11:57.346556 4728 status_manager.go:851] "Failed to get status for pod" podUID="bc456c39-c3f9-4e33-974b-f0627ac7a228" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:57 crc kubenswrapper[4728]: I1205 11:11:57.346961 4728 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:57 crc kubenswrapper[4728]: I1205 11:11:57.347196 4728 status_manager.go:851] "Failed to get status for pod" podUID="d92fe6c3-10f5-4151-86cb-236a4c79463b" pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-x9m7l\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:57 crc kubenswrapper[4728]: I1205 11:11:57.347462 4728 status_manager.go:851] "Failed to get status for pod" podUID="10176a9a-24a6-4a05-a9a7-b91062c87c9b" pod="openshift-marketplace/certified-operators-w5msx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-w5msx\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:57 crc kubenswrapper[4728]: I1205 11:11:57.347873 4728 status_manager.go:851] "Failed to get status for pod" podUID="09543444-057d-42b7-a103-6af978f7c627" pod="openshift-marketplace/redhat-marketplace-5wcjm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-5wcjm\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:57 crc kubenswrapper[4728]: I1205 11:11:57.348192 4728 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 05 11:11:57 crc kubenswrapper[4728]: I1205 11:11:57.348287 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"3c334ca4126424f88f5218ea2702acb228bea014730d790fde180ec4d15cf6cb"} Dec 05 11:11:57 crc kubenswrapper[4728]: I1205 11:11:57.349334 4728 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:57 crc kubenswrapper[4728]: I1205 11:11:57.349803 4728 status_manager.go:851] "Failed to get status for pod" podUID="d92fe6c3-10f5-4151-86cb-236a4c79463b" pod="openshift-authentication/oauth-openshift-558db77b4-x9m7l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-x9m7l\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:57 crc kubenswrapper[4728]: I1205 11:11:57.350016 4728 status_manager.go:851] "Failed to get status for pod" podUID="10176a9a-24a6-4a05-a9a7-b91062c87c9b" pod="openshift-marketplace/certified-operators-w5msx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-w5msx\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:57 crc kubenswrapper[4728]: I1205 11:11:57.350375 4728 status_manager.go:851] "Failed to get status for pod" podUID="09543444-057d-42b7-a103-6af978f7c627" pod="openshift-marketplace/redhat-marketplace-5wcjm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-5wcjm\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:57 crc kubenswrapper[4728]: I1205 11:11:57.350904 4728 status_manager.go:851] "Failed to get status for pod" podUID="23bda114-880c-46d2-ba79-57a723a6b547" pod="openshift-marketplace/redhat-operators-qrl2q" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qrl2q\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:57 crc kubenswrapper[4728]: I1205 11:11:57.351499 4728 status_manager.go:851] "Failed to get status for pod" podUID="7489957c-ebb7-4902-9617-9a1287ccccb4" pod="openshift-marketplace/community-operators-6l2f8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-6l2f8\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:57 crc kubenswrapper[4728]: I1205 11:11:57.351973 4728 status_manager.go:851] "Failed to get status for pod" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/pods/machine-config-daemon-w8qlp\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:57 crc kubenswrapper[4728]: I1205 11:11:57.352316 4728 status_manager.go:851] "Failed to get status for pod" podUID="09c93128-1454-446f-bb75-771442084d74" 
pod="openshift-marketplace/redhat-marketplace-mjf89" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-mjf89\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:57 crc kubenswrapper[4728]: I1205 11:11:57.352785 4728 status_manager.go:851] "Failed to get status for pod" podUID="29c0e6c0-78ea-4a37-aa57-22af46f50133" pod="openshift-marketplace/redhat-operators-tfbzt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-tfbzt\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:57 crc kubenswrapper[4728]: I1205 11:11:57.353257 4728 status_manager.go:851] "Failed to get status for pod" podUID="bc456c39-c3f9-4e33-974b-f0627ac7a228" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.146:6443: connect: connection refused" Dec 05 11:11:58 crc kubenswrapper[4728]: I1205 11:11:58.038985 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 11:11:58 crc kubenswrapper[4728]: I1205 11:11:58.366218 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"42489ba87c5a87fda2df25734f123e757f4c0a3a816f57cf541369f3f8162545"} Dec 05 11:11:59 crc kubenswrapper[4728]: I1205 11:11:59.367840 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"8ea916e607bf6dd5f118f869dd481c2f960854b27a02a270fac4670caf966c87"} Dec 05 11:12:00 crc kubenswrapper[4728]: I1205 11:12:00.391144 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"973fd46b5239cfadef41e24d0af3b1fa53836b13f07f22ea314ff4cf8f10d2f4"} Dec 05 11:12:00 crc kubenswrapper[4728]: I1205 11:12:00.391191 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"a22a5c407ee4a11a16a62a7adf810d72cf4538903aedce5b7e8989c6962f06b8"} Dec 05 11:12:01 crc kubenswrapper[4728]: I1205 11:12:01.268567 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 11:12:01 crc kubenswrapper[4728]: I1205 11:12:01.269507 4728 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Dec 05 11:12:01 crc kubenswrapper[4728]: I1205 11:12:01.269829 4728 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Dec 05 11:12:01 crc kubenswrapper[4728]: I1205 11:12:01.403143 4728 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"30fb760a59ee69ab045efaffa3246cf1e013d7f610a00196f350ad562b1b680d"} Dec 05 11:12:01 crc kubenswrapper[4728]: I1205 11:12:01.403684 4728 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="e6b1dbb0-8a99-4b3b-870e-771cdaac1bac" Dec 05 11:12:01 crc kubenswrapper[4728]: I1205 11:12:01.403727 4728 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="e6b1dbb0-8a99-4b3b-870e-771cdaac1bac" Dec 05 11:12:01 crc kubenswrapper[4728]: I1205 11:12:01.403921 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 11:12:01 crc kubenswrapper[4728]: I1205 11:12:01.413534 4728 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 11:12:02 crc kubenswrapper[4728]: I1205 11:12:02.408656 4728 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="e6b1dbb0-8a99-4b3b-870e-771cdaac1bac" Dec 05 11:12:02 crc kubenswrapper[4728]: I1205 11:12:02.408955 4728 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="e6b1dbb0-8a99-4b3b-870e-771cdaac1bac" Dec 05 11:12:06 crc kubenswrapper[4728]: I1205 11:12:06.388771 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 11:12:06 crc kubenswrapper[4728]: I1205 11:12:06.389680 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 11:12:06 crc kubenswrapper[4728]: I1205 11:12:06.390265 4728 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="e6b1dbb0-8a99-4b3b-870e-771cdaac1bac" Dec 05 11:12:06 crc kubenswrapper[4728]: I1205 11:12:06.390289 4728 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="e6b1dbb0-8a99-4b3b-870e-771cdaac1bac" Dec 05 11:12:06 crc kubenswrapper[4728]: I1205 11:12:06.397681 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 11:12:06 crc kubenswrapper[4728]: I1205 11:12:06.400137 4728 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="8ea74518-6d0c-480d-be4c-7aa26846ccf6" Dec 05 11:12:06 crc kubenswrapper[4728]: I1205 11:12:06.429362 4728 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="e6b1dbb0-8a99-4b3b-870e-771cdaac1bac" Dec 05 11:12:06 crc kubenswrapper[4728]: I1205 11:12:06.429395 4728 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="e6b1dbb0-8a99-4b3b-870e-771cdaac1bac" Dec 05 11:12:06 crc kubenswrapper[4728]: I1205 11:12:06.434271 4728 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="8ea74518-6d0c-480d-be4c-7aa26846ccf6" Dec 05 11:12:06 crc kubenswrapper[4728]: I1205 11:12:06.435962 4728 status_manager.go:308] "Container readiness changed before pod has synced" 
pod="openshift-kube-apiserver/kube-apiserver-crc" containerID="cri-o://42489ba87c5a87fda2df25734f123e757f4c0a3a816f57cf541369f3f8162545" Dec 05 11:12:06 crc kubenswrapper[4728]: I1205 11:12:06.436009 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 11:12:07 crc kubenswrapper[4728]: I1205 11:12:07.435923 4728 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="e6b1dbb0-8a99-4b3b-870e-771cdaac1bac" Dec 05 11:12:07 crc kubenswrapper[4728]: I1205 11:12:07.436169 4728 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="e6b1dbb0-8a99-4b3b-870e-771cdaac1bac" Dec 05 11:12:07 crc kubenswrapper[4728]: I1205 11:12:07.438762 4728 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="8ea74518-6d0c-480d-be4c-7aa26846ccf6" Dec 05 11:12:11 crc kubenswrapper[4728]: I1205 11:12:11.269199 4728 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Dec 05 11:12:11 crc kubenswrapper[4728]: I1205 11:12:11.269894 4728 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Dec 05 11:12:14 crc kubenswrapper[4728]: I1205 11:12:14.055725 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 05 11:12:14 crc kubenswrapper[4728]: I1205 11:12:14.664671 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 05 11:12:15 crc kubenswrapper[4728]: I1205 11:12:15.653317 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 05 11:12:16 crc kubenswrapper[4728]: I1205 11:12:16.044186 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 05 11:12:16 crc kubenswrapper[4728]: I1205 11:12:16.149729 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 05 11:12:16 crc kubenswrapper[4728]: I1205 11:12:16.260427 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 05 11:12:16 crc kubenswrapper[4728]: I1205 11:12:16.423242 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 05 11:12:16 crc kubenswrapper[4728]: I1205 11:12:16.479864 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 05 11:12:16 crc kubenswrapper[4728]: I1205 11:12:16.668061 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 05 11:12:16 crc kubenswrapper[4728]: I1205 11:12:16.740832 4728 reflector.go:368] Caches populated 
for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 05 11:12:16 crc kubenswrapper[4728]: I1205 11:12:16.753204 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 05 11:12:16 crc kubenswrapper[4728]: I1205 11:12:16.788524 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 05 11:12:17 crc kubenswrapper[4728]: I1205 11:12:17.303982 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 05 11:12:17 crc kubenswrapper[4728]: I1205 11:12:17.422115 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 05 11:12:17 crc kubenswrapper[4728]: I1205 11:12:17.443457 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 05 11:12:17 crc kubenswrapper[4728]: I1205 11:12:17.483773 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 05 11:12:17 crc kubenswrapper[4728]: I1205 11:12:17.531530 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 05 11:12:17 crc kubenswrapper[4728]: I1205 11:12:17.602647 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 05 11:12:18 crc kubenswrapper[4728]: I1205 11:12:18.064211 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 05 11:12:18 crc kubenswrapper[4728]: I1205 11:12:18.124938 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 05 11:12:18 crc kubenswrapper[4728]: I1205 11:12:18.188625 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 05 11:12:18 crc kubenswrapper[4728]: I1205 11:12:18.201407 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 05 11:12:18 crc kubenswrapper[4728]: I1205 11:12:18.320996 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 05 11:12:18 crc kubenswrapper[4728]: I1205 11:12:18.336004 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 05 11:12:18 crc kubenswrapper[4728]: I1205 11:12:18.373086 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 05 11:12:18 crc kubenswrapper[4728]: I1205 11:12:18.410319 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 05 11:12:18 crc kubenswrapper[4728]: I1205 11:12:18.490859 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 05 11:12:18 crc kubenswrapper[4728]: I1205 11:12:18.540258 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 05 11:12:18 crc kubenswrapper[4728]: I1205 11:12:18.571333 4728 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 05 11:12:18 crc kubenswrapper[4728]: I1205 11:12:18.800028 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 05 11:12:18 crc kubenswrapper[4728]: I1205 11:12:18.888250 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 05 11:12:18 crc kubenswrapper[4728]: I1205 11:12:18.949938 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 05 11:12:19 crc kubenswrapper[4728]: I1205 11:12:19.015145 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 05 11:12:19 crc kubenswrapper[4728]: I1205 11:12:19.041272 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 05 11:12:19 crc kubenswrapper[4728]: I1205 11:12:19.044919 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 05 11:12:19 crc kubenswrapper[4728]: I1205 11:12:19.261297 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 05 11:12:19 crc kubenswrapper[4728]: I1205 11:12:19.345904 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 05 11:12:19 crc kubenswrapper[4728]: I1205 11:12:19.374000 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 05 11:12:19 crc kubenswrapper[4728]: I1205 11:12:19.380339 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 05 11:12:19 crc kubenswrapper[4728]: I1205 11:12:19.401544 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 05 11:12:19 crc kubenswrapper[4728]: I1205 11:12:19.482779 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 05 11:12:19 crc kubenswrapper[4728]: I1205 11:12:19.518479 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 05 11:12:19 crc kubenswrapper[4728]: I1205 11:12:19.555598 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 05 11:12:19 crc kubenswrapper[4728]: I1205 11:12:19.634288 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 05 11:12:19 crc kubenswrapper[4728]: I1205 11:12:19.652305 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 05 11:12:19 crc kubenswrapper[4728]: I1205 11:12:19.678778 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 05 11:12:19 crc kubenswrapper[4728]: I1205 11:12:19.698432 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 05 11:12:19 crc kubenswrapper[4728]: I1205 
11:12:19.706974 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 05 11:12:19 crc kubenswrapper[4728]: I1205 11:12:19.767640 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 05 11:12:19 crc kubenswrapper[4728]: I1205 11:12:19.784123 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 05 11:12:19 crc kubenswrapper[4728]: I1205 11:12:19.785606 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 05 11:12:19 crc kubenswrapper[4728]: I1205 11:12:19.836642 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 05 11:12:19 crc kubenswrapper[4728]: I1205 11:12:19.876922 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 05 11:12:19 crc kubenswrapper[4728]: I1205 11:12:19.912436 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 05 11:12:19 crc kubenswrapper[4728]: I1205 11:12:19.942377 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 05 11:12:19 crc kubenswrapper[4728]: I1205 11:12:19.993726 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 05 11:12:20 crc kubenswrapper[4728]: I1205 11:12:20.042359 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 05 11:12:20 crc kubenswrapper[4728]: I1205 11:12:20.184483 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 05 11:12:20 crc kubenswrapper[4728]: I1205 11:12:20.186396 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 05 11:12:20 crc kubenswrapper[4728]: I1205 11:12:20.248221 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 05 11:12:20 crc kubenswrapper[4728]: I1205 11:12:20.277542 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 05 11:12:20 crc kubenswrapper[4728]: I1205 11:12:20.277567 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 05 11:12:20 crc kubenswrapper[4728]: I1205 11:12:20.292210 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 05 11:12:20 crc kubenswrapper[4728]: I1205 11:12:20.332031 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 05 11:12:20 crc kubenswrapper[4728]: I1205 11:12:20.399884 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 05 11:12:20 crc kubenswrapper[4728]: I1205 11:12:20.434403 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 05 11:12:20 crc kubenswrapper[4728]: 
I1205 11:12:20.487874 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 05 11:12:20 crc kubenswrapper[4728]: I1205 11:12:20.536321 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 05 11:12:20 crc kubenswrapper[4728]: I1205 11:12:20.613825 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 05 11:12:20 crc kubenswrapper[4728]: I1205 11:12:20.980213 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 05 11:12:21 crc kubenswrapper[4728]: I1205 11:12:21.013249 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 05 11:12:21 crc kubenswrapper[4728]: I1205 11:12:21.078339 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 05 11:12:21 crc kubenswrapper[4728]: I1205 11:12:21.089690 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 05 11:12:21 crc kubenswrapper[4728]: I1205 11:12:21.159668 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 05 11:12:21 crc kubenswrapper[4728]: I1205 11:12:21.254133 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 05 11:12:21 crc kubenswrapper[4728]: I1205 11:12:21.269303 4728 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Dec 05 11:12:21 crc kubenswrapper[4728]: I1205 11:12:21.269363 4728 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Dec 05 11:12:21 crc kubenswrapper[4728]: I1205 11:12:21.269413 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 11:12:21 crc kubenswrapper[4728]: I1205 11:12:21.270083 4728 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="kube-controller-manager" containerStatusID={"Type":"cri-o","ID":"3c334ca4126424f88f5218ea2702acb228bea014730d790fde180ec4d15cf6cb"} pod="openshift-kube-controller-manager/kube-controller-manager-crc" containerMessage="Container kube-controller-manager failed startup probe, will be restarted" Dec 05 11:12:21 crc kubenswrapper[4728]: I1205 11:12:21.270189 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" containerID="cri-o://3c334ca4126424f88f5218ea2702acb228bea014730d790fde180ec4d15cf6cb" gracePeriod=30 Dec 05 11:12:21 crc kubenswrapper[4728]: I1205 11:12:21.330282 4728 reflector.go:368] Caches populated for *v1.Secret 
from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 05 11:12:21 crc kubenswrapper[4728]: I1205 11:12:21.356932 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 05 11:12:21 crc kubenswrapper[4728]: I1205 11:12:21.401584 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 05 11:12:21 crc kubenswrapper[4728]: I1205 11:12:21.544467 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 05 11:12:21 crc kubenswrapper[4728]: I1205 11:12:21.581662 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 05 11:12:21 crc kubenswrapper[4728]: I1205 11:12:21.689648 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 05 11:12:21 crc kubenswrapper[4728]: I1205 11:12:21.721845 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 05 11:12:21 crc kubenswrapper[4728]: I1205 11:12:21.745754 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 05 11:12:21 crc kubenswrapper[4728]: I1205 11:12:21.905304 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 05 11:12:21 crc kubenswrapper[4728]: I1205 11:12:21.961256 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 05 11:12:22 crc kubenswrapper[4728]: I1205 11:12:22.054171 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 05 11:12:22 crc kubenswrapper[4728]: I1205 11:12:22.149174 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 05 11:12:22 crc kubenswrapper[4728]: I1205 11:12:22.235566 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 05 11:12:22 crc kubenswrapper[4728]: I1205 11:12:22.248886 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 05 11:12:22 crc kubenswrapper[4728]: I1205 11:12:22.431657 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 05 11:12:22 crc kubenswrapper[4728]: I1205 11:12:22.550497 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 05 11:12:22 crc kubenswrapper[4728]: I1205 11:12:22.589743 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 05 11:12:22 crc kubenswrapper[4728]: I1205 11:12:22.690465 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 05 11:12:22 crc kubenswrapper[4728]: I1205 11:12:22.729083 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 05 11:12:22 crc kubenswrapper[4728]: I1205 11:12:22.977602 4728 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 05 11:12:23 crc kubenswrapper[4728]: I1205 11:12:23.026599 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 05 11:12:23 crc kubenswrapper[4728]: I1205 11:12:23.262562 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 05 11:12:23 crc kubenswrapper[4728]: I1205 11:12:23.353084 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 05 11:12:23 crc kubenswrapper[4728]: I1205 11:12:23.372638 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 05 11:12:23 crc kubenswrapper[4728]: I1205 11:12:23.421985 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 05 11:12:23 crc kubenswrapper[4728]: I1205 11:12:23.510089 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 05 11:12:23 crc kubenswrapper[4728]: I1205 11:12:23.540727 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 05 11:12:23 crc kubenswrapper[4728]: I1205 11:12:23.541978 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 05 11:12:23 crc kubenswrapper[4728]: I1205 11:12:23.594350 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 05 11:12:23 crc kubenswrapper[4728]: I1205 11:12:23.637823 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 05 11:12:23 crc kubenswrapper[4728]: I1205 11:12:23.646830 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 05 11:12:23 crc kubenswrapper[4728]: I1205 11:12:23.687817 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 05 11:12:23 crc kubenswrapper[4728]: I1205 11:12:23.706247 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 05 11:12:23 crc kubenswrapper[4728]: I1205 11:12:23.714307 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 05 11:12:23 crc kubenswrapper[4728]: I1205 11:12:23.727476 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 05 11:12:23 crc kubenswrapper[4728]: I1205 11:12:23.751064 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 05 11:12:23 crc kubenswrapper[4728]: I1205 11:12:23.808008 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 05 11:12:23 crc kubenswrapper[4728]: I1205 11:12:23.895249 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 05 11:12:23 crc 
kubenswrapper[4728]: I1205 11:12:23.932379 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 05 11:12:24 crc kubenswrapper[4728]: I1205 11:12:24.003907 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 05 11:12:24 crc kubenswrapper[4728]: I1205 11:12:24.053059 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 05 11:12:24 crc kubenswrapper[4728]: I1205 11:12:24.064007 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 05 11:12:24 crc kubenswrapper[4728]: I1205 11:12:24.084442 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 05 11:12:24 crc kubenswrapper[4728]: I1205 11:12:24.089364 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 05 11:12:24 crc kubenswrapper[4728]: I1205 11:12:24.114411 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 05 11:12:24 crc kubenswrapper[4728]: I1205 11:12:24.151091 4728 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 05 11:12:24 crc kubenswrapper[4728]: I1205 11:12:24.151591 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-mjf89" podStartSLOduration=38.175167369 podStartE2EDuration="2m11.151574756s" podCreationTimestamp="2025-12-05 11:10:13 +0000 UTC" firstStartedPulling="2025-12-05 11:10:15.209249144 +0000 UTC m=+149.351371837" lastFinishedPulling="2025-12-05 11:11:48.185656531 +0000 UTC m=+242.327779224" observedRunningTime="2025-12-05 11:12:01.620843641 +0000 UTC m=+255.762966334" watchObservedRunningTime="2025-12-05 11:12:24.151574756 +0000 UTC m=+278.293697449" Dec 05 11:12:24 crc kubenswrapper[4728]: I1205 11:12:24.153310 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-5wcjm" podStartSLOduration=38.193688419 podStartE2EDuration="2m10.153304631s" podCreationTimestamp="2025-12-05 11:10:14 +0000 UTC" firstStartedPulling="2025-12-05 11:10:16.304677599 +0000 UTC m=+150.446800292" lastFinishedPulling="2025-12-05 11:11:48.264293801 +0000 UTC m=+242.406416504" observedRunningTime="2025-12-05 11:12:01.536194463 +0000 UTC m=+255.678317176" watchObservedRunningTime="2025-12-05 11:12:24.153304631 +0000 UTC m=+278.295427324" Dec 05 11:12:24 crc kubenswrapper[4728]: I1205 11:12:24.154050 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-tfbzt" podStartSLOduration=40.879189419 podStartE2EDuration="2m10.15404314s" podCreationTimestamp="2025-12-05 11:10:14 +0000 UTC" firstStartedPulling="2025-12-05 11:10:16.309840437 +0000 UTC m=+150.451963130" lastFinishedPulling="2025-12-05 11:11:45.584694158 +0000 UTC m=+239.726816851" observedRunningTime="2025-12-05 11:12:01.444543329 +0000 UTC m=+255.586666032" watchObservedRunningTime="2025-12-05 11:12:24.15404314 +0000 UTC m=+278.296165833" Dec 05 11:12:24 crc kubenswrapper[4728]: I1205 11:12:24.155503 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openshift-kube-apiserver/kube-apiserver-crc","openshift-marketplace/certified-operators-w5msx","openshift-authentication/oauth-openshift-558db77b4-x9m7l","openshift-marketplace/redhat-operators-qrl2q","openshift-marketplace/community-operators-6l2f8"] Dec 05 11:12:24 crc kubenswrapper[4728]: I1205 11:12:24.155601 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 05 11:12:24 crc kubenswrapper[4728]: I1205 11:12:24.160640 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 11:12:24 crc kubenswrapper[4728]: I1205 11:12:24.174579 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=23.174563045 podStartE2EDuration="23.174563045s" podCreationTimestamp="2025-12-05 11:12:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:12:24.173084736 +0000 UTC m=+278.315207449" watchObservedRunningTime="2025-12-05 11:12:24.174563045 +0000 UTC m=+278.316685738" Dec 05 11:12:24 crc kubenswrapper[4728]: I1205 11:12:24.177426 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 05 11:12:24 crc kubenswrapper[4728]: I1205 11:12:24.228541 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 05 11:12:24 crc kubenswrapper[4728]: I1205 11:12:24.230421 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 05 11:12:24 crc kubenswrapper[4728]: I1205 11:12:24.355831 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 05 11:12:24 crc kubenswrapper[4728]: I1205 11:12:24.359272 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 05 11:12:24 crc kubenswrapper[4728]: I1205 11:12:24.364626 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="10176a9a-24a6-4a05-a9a7-b91062c87c9b" path="/var/lib/kubelet/pods/10176a9a-24a6-4a05-a9a7-b91062c87c9b/volumes" Dec 05 11:12:24 crc kubenswrapper[4728]: I1205 11:12:24.368427 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="23bda114-880c-46d2-ba79-57a723a6b547" path="/var/lib/kubelet/pods/23bda114-880c-46d2-ba79-57a723a6b547/volumes" Dec 05 11:12:24 crc kubenswrapper[4728]: I1205 11:12:24.369326 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7489957c-ebb7-4902-9617-9a1287ccccb4" path="/var/lib/kubelet/pods/7489957c-ebb7-4902-9617-9a1287ccccb4/volumes" Dec 05 11:12:24 crc kubenswrapper[4728]: I1205 11:12:24.370026 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d92fe6c3-10f5-4151-86cb-236a4c79463b" path="/var/lib/kubelet/pods/d92fe6c3-10f5-4151-86cb-236a4c79463b/volumes" Dec 05 11:12:24 crc kubenswrapper[4728]: I1205 11:12:24.385216 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 05 11:12:24 crc kubenswrapper[4728]: I1205 11:12:24.390971 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 05 
11:12:24 crc kubenswrapper[4728]: I1205 11:12:24.424859 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 05 11:12:24 crc kubenswrapper[4728]: I1205 11:12:24.434317 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 05 11:12:24 crc kubenswrapper[4728]: I1205 11:12:24.444437 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 05 11:12:24 crc kubenswrapper[4728]: I1205 11:12:24.614133 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 05 11:12:24 crc kubenswrapper[4728]: I1205 11:12:24.659680 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 05 11:12:24 crc kubenswrapper[4728]: I1205 11:12:24.674980 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 05 11:12:24 crc kubenswrapper[4728]: I1205 11:12:24.682862 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 05 11:12:24 crc kubenswrapper[4728]: I1205 11:12:24.746737 4728 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 05 11:12:24 crc kubenswrapper[4728]: I1205 11:12:24.790904 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 05 11:12:24 crc kubenswrapper[4728]: I1205 11:12:24.897030 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 05 11:12:24 crc kubenswrapper[4728]: I1205 11:12:24.941824 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 05 11:12:24 crc kubenswrapper[4728]: I1205 11:12:24.979627 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 05 11:12:24 crc kubenswrapper[4728]: I1205 11:12:24.982273 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 05 11:12:25 crc kubenswrapper[4728]: I1205 11:12:25.239935 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 05 11:12:25 crc kubenswrapper[4728]: I1205 11:12:25.241846 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 05 11:12:25 crc kubenswrapper[4728]: I1205 11:12:25.255558 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 05 11:12:25 crc kubenswrapper[4728]: I1205 11:12:25.445289 4728 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 05 11:12:25 crc kubenswrapper[4728]: I1205 11:12:25.450762 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 05 11:12:25 crc kubenswrapper[4728]: I1205 11:12:25.508169 4728 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 05 11:12:25 crc kubenswrapper[4728]: I1205 11:12:25.646362 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 05 11:12:25 crc kubenswrapper[4728]: I1205 11:12:25.663605 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 05 11:12:25 crc kubenswrapper[4728]: I1205 11:12:25.666281 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 05 11:12:25 crc kubenswrapper[4728]: I1205 11:12:25.761878 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 05 11:12:25 crc kubenswrapper[4728]: I1205 11:12:25.780370 4728 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 05 11:12:25 crc kubenswrapper[4728]: I1205 11:12:25.780623 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://d83b861e64e4a734e440702d4bac8a47bd7cf9650c472101383dbfb9d675e612" gracePeriod=5 Dec 05 11:12:25 crc kubenswrapper[4728]: I1205 11:12:25.879974 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 05 11:12:25 crc kubenswrapper[4728]: I1205 11:12:25.949479 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 05 11:12:25 crc kubenswrapper[4728]: I1205 11:12:25.978882 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 05 11:12:26 crc kubenswrapper[4728]: I1205 11:12:26.046720 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 05 11:12:26 crc kubenswrapper[4728]: I1205 11:12:26.049300 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 05 11:12:26 crc kubenswrapper[4728]: I1205 11:12:26.165777 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 05 11:12:26 crc kubenswrapper[4728]: I1205 11:12:26.394050 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 05 11:12:26 crc kubenswrapper[4728]: I1205 11:12:26.448407 4728 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 05 11:12:26 crc kubenswrapper[4728]: I1205 11:12:26.473515 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 05 11:12:26 crc kubenswrapper[4728]: I1205 11:12:26.541093 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 05 11:12:26 crc kubenswrapper[4728]: I1205 11:12:26.616116 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 05 11:12:26 crc kubenswrapper[4728]: I1205 11:12:26.714254 4728 reflector.go:368] Caches populated 
for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 05 11:12:26 crc kubenswrapper[4728]: I1205 11:12:26.726733 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 05 11:12:26 crc kubenswrapper[4728]: I1205 11:12:26.727550 4728 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 05 11:12:26 crc kubenswrapper[4728]: I1205 11:12:26.813965 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 05 11:12:26 crc kubenswrapper[4728]: I1205 11:12:26.976892 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 05 11:12:27 crc kubenswrapper[4728]: I1205 11:12:27.013014 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 05 11:12:27 crc kubenswrapper[4728]: I1205 11:12:27.021658 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 05 11:12:27 crc kubenswrapper[4728]: I1205 11:12:27.025752 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 05 11:12:27 crc kubenswrapper[4728]: I1205 11:12:27.062356 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 05 11:12:27 crc kubenswrapper[4728]: I1205 11:12:27.075029 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 05 11:12:27 crc kubenswrapper[4728]: I1205 11:12:27.097830 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 05 11:12:27 crc kubenswrapper[4728]: I1205 11:12:27.130732 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 05 11:12:27 crc kubenswrapper[4728]: I1205 11:12:27.136062 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 05 11:12:27 crc kubenswrapper[4728]: I1205 11:12:27.241464 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 05 11:12:27 crc kubenswrapper[4728]: I1205 11:12:27.274539 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 05 11:12:27 crc kubenswrapper[4728]: I1205 11:12:27.285194 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 05 11:12:27 crc kubenswrapper[4728]: I1205 11:12:27.357309 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 05 11:12:27 crc kubenswrapper[4728]: I1205 11:12:27.384886 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 05 11:12:27 crc kubenswrapper[4728]: I1205 11:12:27.408354 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 05 11:12:27 crc kubenswrapper[4728]: I1205 11:12:27.414922 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 
05 11:12:27 crc kubenswrapper[4728]: I1205 11:12:27.558353 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 05 11:12:27 crc kubenswrapper[4728]: I1205 11:12:27.678681 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 05 11:12:27 crc kubenswrapper[4728]: I1205 11:12:27.710698 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 05 11:12:27 crc kubenswrapper[4728]: I1205 11:12:27.929181 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 05 11:12:27 crc kubenswrapper[4728]: I1205 11:12:27.930278 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 05 11:12:28 crc kubenswrapper[4728]: I1205 11:12:28.010931 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 05 11:12:28 crc kubenswrapper[4728]: I1205 11:12:28.021156 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 05 11:12:28 crc kubenswrapper[4728]: I1205 11:12:28.025769 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 05 11:12:28 crc kubenswrapper[4728]: I1205 11:12:28.030156 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 05 11:12:28 crc kubenswrapper[4728]: I1205 11:12:28.055070 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 05 11:12:28 crc kubenswrapper[4728]: I1205 11:12:28.296501 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 05 11:12:28 crc kubenswrapper[4728]: I1205 11:12:28.614942 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 05 11:12:28 crc kubenswrapper[4728]: I1205 11:12:28.651330 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 05 11:12:28 crc kubenswrapper[4728]: I1205 11:12:28.905139 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 05 11:12:28 crc kubenswrapper[4728]: I1205 11:12:28.936173 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 05 11:12:29 crc kubenswrapper[4728]: I1205 11:12:29.019318 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 05 11:12:29 crc kubenswrapper[4728]: I1205 11:12:29.380481 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 05 11:12:29 crc kubenswrapper[4728]: I1205 11:12:29.778840 4728 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 05 11:12:29 crc kubenswrapper[4728]: I1205 11:12:29.818878 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 05 11:12:29 crc 
kubenswrapper[4728]: I1205 11:12:29.844539 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 05 11:12:31 crc kubenswrapper[4728]: I1205 11:12:31.362633 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 05 11:12:31 crc kubenswrapper[4728]: I1205 11:12:31.363141 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 11:12:31 crc kubenswrapper[4728]: I1205 11:12:31.422436 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 05 11:12:31 crc kubenswrapper[4728]: I1205 11:12:31.422485 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 05 11:12:31 crc kubenswrapper[4728]: I1205 11:12:31.422503 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 05 11:12:31 crc kubenswrapper[4728]: I1205 11:12:31.422522 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 05 11:12:31 crc kubenswrapper[4728]: I1205 11:12:31.422554 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 05 11:12:31 crc kubenswrapper[4728]: I1205 11:12:31.422619 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:12:31 crc kubenswrapper[4728]: I1205 11:12:31.422656 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:12:31 crc kubenswrapper[4728]: I1205 11:12:31.422612 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:12:31 crc kubenswrapper[4728]: I1205 11:12:31.422701 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:12:31 crc kubenswrapper[4728]: I1205 11:12:31.422789 4728 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Dec 05 11:12:31 crc kubenswrapper[4728]: I1205 11:12:31.422828 4728 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Dec 05 11:12:31 crc kubenswrapper[4728]: I1205 11:12:31.422842 4728 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Dec 05 11:12:31 crc kubenswrapper[4728]: I1205 11:12:31.422854 4728 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 05 11:12:31 crc kubenswrapper[4728]: I1205 11:12:31.430677 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:12:31 crc kubenswrapper[4728]: I1205 11:12:31.523601 4728 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 05 11:12:31 crc kubenswrapper[4728]: I1205 11:12:31.575173 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 05 11:12:31 crc kubenswrapper[4728]: I1205 11:12:31.575235 4728 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="d83b861e64e4a734e440702d4bac8a47bd7cf9650c472101383dbfb9d675e612" exitCode=137 Dec 05 11:12:31 crc kubenswrapper[4728]: I1205 11:12:31.575282 4728 scope.go:117] "RemoveContainer" containerID="d83b861e64e4a734e440702d4bac8a47bd7cf9650c472101383dbfb9d675e612" Dec 05 11:12:31 crc kubenswrapper[4728]: I1205 11:12:31.575320 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 11:12:31 crc kubenswrapper[4728]: I1205 11:12:31.594519 4728 scope.go:117] "RemoveContainer" containerID="d83b861e64e4a734e440702d4bac8a47bd7cf9650c472101383dbfb9d675e612" Dec 05 11:12:31 crc kubenswrapper[4728]: E1205 11:12:31.595654 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d83b861e64e4a734e440702d4bac8a47bd7cf9650c472101383dbfb9d675e612\": container with ID starting with d83b861e64e4a734e440702d4bac8a47bd7cf9650c472101383dbfb9d675e612 not found: ID does not exist" containerID="d83b861e64e4a734e440702d4bac8a47bd7cf9650c472101383dbfb9d675e612" Dec 05 11:12:31 crc kubenswrapper[4728]: I1205 11:12:31.595700 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d83b861e64e4a734e440702d4bac8a47bd7cf9650c472101383dbfb9d675e612"} err="failed to get container status \"d83b861e64e4a734e440702d4bac8a47bd7cf9650c472101383dbfb9d675e612\": rpc error: code = NotFound desc = could not find container \"d83b861e64e4a734e440702d4bac8a47bd7cf9650c472101383dbfb9d675e612\": container with ID starting with d83b861e64e4a734e440702d4bac8a47bd7cf9650c472101383dbfb9d675e612 not found: ID does not exist" Dec 05 11:12:32 crc kubenswrapper[4728]: I1205 11:12:32.359226 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Dec 05 11:12:37 crc kubenswrapper[4728]: I1205 11:12:37.891943 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 05 11:12:39 crc kubenswrapper[4728]: I1205 11:12:39.096081 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 05 11:12:40 crc kubenswrapper[4728]: I1205 11:12:40.935459 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 05 11:12:41 crc kubenswrapper[4728]: I1205 11:12:41.879514 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 05 11:12:42 crc kubenswrapper[4728]: I1205 11:12:42.141258 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 05 11:12:42 crc kubenswrapper[4728]: I1205 11:12:42.657466 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 05 11:12:42 crc kubenswrapper[4728]: I1205 11:12:42.748454 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 05 11:12:43 crc kubenswrapper[4728]: I1205 11:12:43.253941 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 05 11:12:44 crc kubenswrapper[4728]: I1205 11:12:44.012181 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 05 11:12:44 crc kubenswrapper[4728]: I1205 11:12:44.043445 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 05 11:12:44 crc kubenswrapper[4728]: I1205 11:12:44.649789 4728 generic.go:334] "Generic (PLEG): container finished" podID="8c8f3b0b-edad-4a06-8374-2d2d3cd805d8" 
containerID="6eb7a8c4de7b91719bcde13603bacdcace249ece06ce508ae7a9dfc739264d6b" exitCode=0 Dec 05 11:12:44 crc kubenswrapper[4728]: I1205 11:12:44.649915 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-2clxj" event={"ID":"8c8f3b0b-edad-4a06-8374-2d2d3cd805d8","Type":"ContainerDied","Data":"6eb7a8c4de7b91719bcde13603bacdcace249ece06ce508ae7a9dfc739264d6b"} Dec 05 11:12:44 crc kubenswrapper[4728]: I1205 11:12:44.650475 4728 scope.go:117] "RemoveContainer" containerID="6eb7a8c4de7b91719bcde13603bacdcace249ece06ce508ae7a9dfc739264d6b" Dec 05 11:12:44 crc kubenswrapper[4728]: I1205 11:12:44.943378 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-2clxj" Dec 05 11:12:44 crc kubenswrapper[4728]: I1205 11:12:44.943431 4728 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-marketplace/marketplace-operator-79b997595-2clxj" Dec 05 11:12:44 crc kubenswrapper[4728]: I1205 11:12:44.980392 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 05 11:12:45 crc kubenswrapper[4728]: I1205 11:12:45.658119 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-2clxj" event={"ID":"8c8f3b0b-edad-4a06-8374-2d2d3cd805d8","Type":"ContainerStarted","Data":"8804388d269cf66039caca0903b822ad608b69cb6c7c3a54fd3ae6df8248bbed"} Dec 05 11:12:45 crc kubenswrapper[4728]: I1205 11:12:45.659313 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-2clxj" Dec 05 11:12:45 crc kubenswrapper[4728]: I1205 11:12:45.665383 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-2clxj" Dec 05 11:12:45 crc kubenswrapper[4728]: I1205 11:12:45.898811 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 05 11:12:45 crc kubenswrapper[4728]: I1205 11:12:45.994347 4728 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials Dec 05 11:12:46 crc kubenswrapper[4728]: I1205 11:12:46.943569 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.376075 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb"] Dec 05 11:12:47 crc kubenswrapper[4728]: E1205 11:12:47.376325 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23bda114-880c-46d2-ba79-57a723a6b547" containerName="registry-server" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.376344 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="23bda114-880c-46d2-ba79-57a723a6b547" containerName="registry-server" Dec 05 11:12:47 crc kubenswrapper[4728]: E1205 11:12:47.376361 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc456c39-c3f9-4e33-974b-f0627ac7a228" containerName="installer" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.376369 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc456c39-c3f9-4e33-974b-f0627ac7a228" containerName="installer" Dec 05 11:12:47 crc kubenswrapper[4728]: E1205 11:12:47.376385 4728 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.376392 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 05 11:12:47 crc kubenswrapper[4728]: E1205 11:12:47.376401 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7489957c-ebb7-4902-9617-9a1287ccccb4" containerName="registry-server" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.376407 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="7489957c-ebb7-4902-9617-9a1287ccccb4" containerName="registry-server" Dec 05 11:12:47 crc kubenswrapper[4728]: E1205 11:12:47.376420 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10176a9a-24a6-4a05-a9a7-b91062c87c9b" containerName="registry-server" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.376425 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="10176a9a-24a6-4a05-a9a7-b91062c87c9b" containerName="registry-server" Dec 05 11:12:47 crc kubenswrapper[4728]: E1205 11:12:47.376433 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10176a9a-24a6-4a05-a9a7-b91062c87c9b" containerName="extract-utilities" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.376439 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="10176a9a-24a6-4a05-a9a7-b91062c87c9b" containerName="extract-utilities" Dec 05 11:12:47 crc kubenswrapper[4728]: E1205 11:12:47.376448 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d92fe6c3-10f5-4151-86cb-236a4c79463b" containerName="oauth-openshift" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.376454 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="d92fe6c3-10f5-4151-86cb-236a4c79463b" containerName="oauth-openshift" Dec 05 11:12:47 crc kubenswrapper[4728]: E1205 11:12:47.376462 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23bda114-880c-46d2-ba79-57a723a6b547" containerName="extract-content" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.376467 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="23bda114-880c-46d2-ba79-57a723a6b547" containerName="extract-content" Dec 05 11:12:47 crc kubenswrapper[4728]: E1205 11:12:47.376477 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7489957c-ebb7-4902-9617-9a1287ccccb4" containerName="extract-utilities" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.376482 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="7489957c-ebb7-4902-9617-9a1287ccccb4" containerName="extract-utilities" Dec 05 11:12:47 crc kubenswrapper[4728]: E1205 11:12:47.376490 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23bda114-880c-46d2-ba79-57a723a6b547" containerName="extract-utilities" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.376495 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="23bda114-880c-46d2-ba79-57a723a6b547" containerName="extract-utilities" Dec 05 11:12:47 crc kubenswrapper[4728]: E1205 11:12:47.376502 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7489957c-ebb7-4902-9617-9a1287ccccb4" containerName="extract-content" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.376507 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="7489957c-ebb7-4902-9617-9a1287ccccb4" containerName="extract-content" Dec 05 11:12:47 crc kubenswrapper[4728]: E1205 11:12:47.376515 4728 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="10176a9a-24a6-4a05-a9a7-b91062c87c9b" containerName="extract-content" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.376520 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="10176a9a-24a6-4a05-a9a7-b91062c87c9b" containerName="extract-content" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.376623 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="d92fe6c3-10f5-4151-86cb-236a4c79463b" containerName="oauth-openshift" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.376634 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="23bda114-880c-46d2-ba79-57a723a6b547" containerName="registry-server" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.376642 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.376649 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc456c39-c3f9-4e33-974b-f0627ac7a228" containerName="installer" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.376658 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="10176a9a-24a6-4a05-a9a7-b91062c87c9b" containerName="registry-server" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.376669 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="7489957c-ebb7-4902-9617-9a1287ccccb4" containerName="registry-server" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.377093 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.380920 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.381461 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.381650 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.381726 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.382106 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.382252 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.382397 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.382539 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.382717 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.383414 4728 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.383916 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.385147 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.394123 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.395225 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.401641 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb"] Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.407856 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.555343 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r6ckv\" (UniqueName: \"kubernetes.io/projected/8ed41172-da91-43b9-8b5b-048a3c9e58e2-kube-api-access-r6ckv\") pod \"oauth-openshift-679cb4ddc5-kr9nb\" (UID: \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.555511 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-system-service-ca\") pod \"oauth-openshift-679cb4ddc5-kr9nb\" (UID: \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.555570 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/8ed41172-da91-43b9-8b5b-048a3c9e58e2-audit-dir\") pod \"oauth-openshift-679cb4ddc5-kr9nb\" (UID: \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.555589 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-system-router-certs\") pod \"oauth-openshift-679cb4ddc5-kr9nb\" (UID: \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.555693 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-system-cliconfig\") pod \"oauth-openshift-679cb4ddc5-kr9nb\" (UID: \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.555743 4728 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-user-template-login\") pod \"oauth-openshift-679cb4ddc5-kr9nb\" (UID: \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.555820 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-system-session\") pod \"oauth-openshift-679cb4ddc5-kr9nb\" (UID: \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.555857 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-system-serving-cert\") pod \"oauth-openshift-679cb4ddc5-kr9nb\" (UID: \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.555893 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-679cb4ddc5-kr9nb\" (UID: \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.555930 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-user-template-error\") pod \"oauth-openshift-679cb4ddc5-kr9nb\" (UID: \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.555964 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-679cb4ddc5-kr9nb\" (UID: \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.556004 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-679cb4ddc5-kr9nb\" (UID: \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.556070 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/8ed41172-da91-43b9-8b5b-048a3c9e58e2-audit-policies\") pod \"oauth-openshift-679cb4ddc5-kr9nb\" (UID: 
\"8ed41172-da91-43b9-8b5b-048a3c9e58e2\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.556098 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-679cb4ddc5-kr9nb\" (UID: \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.656536 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-system-cliconfig\") pod \"oauth-openshift-679cb4ddc5-kr9nb\" (UID: \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.656593 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-user-template-login\") pod \"oauth-openshift-679cb4ddc5-kr9nb\" (UID: \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.656632 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-system-session\") pod \"oauth-openshift-679cb4ddc5-kr9nb\" (UID: \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.656663 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-system-serving-cert\") pod \"oauth-openshift-679cb4ddc5-kr9nb\" (UID: \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.656698 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-679cb4ddc5-kr9nb\" (UID: \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.656730 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-user-template-error\") pod \"oauth-openshift-679cb4ddc5-kr9nb\" (UID: \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.656830 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-user-template-provider-selection\") pod 
\"oauth-openshift-679cb4ddc5-kr9nb\" (UID: \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.656876 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-679cb4ddc5-kr9nb\" (UID: \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.656916 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/8ed41172-da91-43b9-8b5b-048a3c9e58e2-audit-policies\") pod \"oauth-openshift-679cb4ddc5-kr9nb\" (UID: \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.656945 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-679cb4ddc5-kr9nb\" (UID: \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.656993 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r6ckv\" (UniqueName: \"kubernetes.io/projected/8ed41172-da91-43b9-8b5b-048a3c9e58e2-kube-api-access-r6ckv\") pod \"oauth-openshift-679cb4ddc5-kr9nb\" (UID: \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.657036 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-system-service-ca\") pod \"oauth-openshift-679cb4ddc5-kr9nb\" (UID: \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.657070 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/8ed41172-da91-43b9-8b5b-048a3c9e58e2-audit-dir\") pod \"oauth-openshift-679cb4ddc5-kr9nb\" (UID: \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.657101 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-system-router-certs\") pod \"oauth-openshift-679cb4ddc5-kr9nb\" (UID: \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.657642 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-system-cliconfig\") pod \"oauth-openshift-679cb4ddc5-kr9nb\" (UID: \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\") " 
pod="openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.658136 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/8ed41172-da91-43b9-8b5b-048a3c9e58e2-audit-dir\") pod \"oauth-openshift-679cb4ddc5-kr9nb\" (UID: \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.658672 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-system-service-ca\") pod \"oauth-openshift-679cb4ddc5-kr9nb\" (UID: \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.659509 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-679cb4ddc5-kr9nb\" (UID: \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.660445 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/8ed41172-da91-43b9-8b5b-048a3c9e58e2-audit-policies\") pod \"oauth-openshift-679cb4ddc5-kr9nb\" (UID: \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.664955 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-system-router-certs\") pod \"oauth-openshift-679cb4ddc5-kr9nb\" (UID: \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.666402 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-user-template-error\") pod \"oauth-openshift-679cb4ddc5-kr9nb\" (UID: \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.667471 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-system-serving-cert\") pod \"oauth-openshift-679cb4ddc5-kr9nb\" (UID: \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.667513 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-user-template-login\") pod \"oauth-openshift-679cb4ddc5-kr9nb\" (UID: \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.667716 4728 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-679cb4ddc5-kr9nb\" (UID: \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.667755 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-system-session\") pod \"oauth-openshift-679cb4ddc5-kr9nb\" (UID: \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.672260 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-679cb4ddc5-kr9nb\" (UID: \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.672885 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-679cb4ddc5-kr9nb\" (UID: \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.688127 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r6ckv\" (UniqueName: \"kubernetes.io/projected/8ed41172-da91-43b9-8b5b-048a3c9e58e2-kube-api-access-r6ckv\") pod \"oauth-openshift-679cb4ddc5-kr9nb\" (UID: \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.702089 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb" Dec 05 11:12:47 crc kubenswrapper[4728]: I1205 11:12:47.938107 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb"] Dec 05 11:12:48 crc kubenswrapper[4728]: I1205 11:12:48.691344 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb" event={"ID":"8ed41172-da91-43b9-8b5b-048a3c9e58e2","Type":"ContainerStarted","Data":"e77dd83fd51595bab94df959a5aee376a2b9babcda4e658791d409bf5be70708"} Dec 05 11:12:50 crc kubenswrapper[4728]: I1205 11:12:50.084613 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 05 11:12:50 crc kubenswrapper[4728]: I1205 11:12:50.806926 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 05 11:12:50 crc kubenswrapper[4728]: I1205 11:12:50.954341 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 05 11:12:51 crc kubenswrapper[4728]: I1205 11:12:51.998048 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 05 11:12:52 crc kubenswrapper[4728]: I1205 11:12:52.714090 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb" event={"ID":"8ed41172-da91-43b9-8b5b-048a3c9e58e2","Type":"ContainerStarted","Data":"8752af5082e78fec516e50941d85d69b6786439337e3958f00c6e51f4ef68eb7"} Dec 05 11:12:52 crc kubenswrapper[4728]: I1205 11:12:52.714690 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb" Dec 05 11:12:52 crc kubenswrapper[4728]: I1205 11:12:52.716066 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/1.log" Dec 05 11:12:52 crc kubenswrapper[4728]: I1205 11:12:52.717744 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 05 11:12:52 crc kubenswrapper[4728]: I1205 11:12:52.717818 4728 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="3c334ca4126424f88f5218ea2702acb228bea014730d790fde180ec4d15cf6cb" exitCode=137 Dec 05 11:12:52 crc kubenswrapper[4728]: I1205 11:12:52.717849 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"3c334ca4126424f88f5218ea2702acb228bea014730d790fde180ec4d15cf6cb"} Dec 05 11:12:52 crc kubenswrapper[4728]: I1205 11:12:52.717877 4728 scope.go:117] "RemoveContainer" containerID="e8ca6321022609b3cd60a9e1a534cfd3e69d3f520e5252ed4ffa8f76be4af91e" Dec 05 11:12:52 crc kubenswrapper[4728]: I1205 11:12:52.719440 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb" Dec 05 11:12:52 crc kubenswrapper[4728]: I1205 11:12:52.737712 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb" 
podStartSLOduration=99.737698171 podStartE2EDuration="1m39.737698171s" podCreationTimestamp="2025-12-05 11:11:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:12:52.736810578 +0000 UTC m=+306.878933271" watchObservedRunningTime="2025-12-05 11:12:52.737698171 +0000 UTC m=+306.879820864" Dec 05 11:12:53 crc kubenswrapper[4728]: I1205 11:12:53.726680 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/1.log" Dec 05 11:12:53 crc kubenswrapper[4728]: I1205 11:12:53.728253 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"b34cba88e618f85f8be5544623e0d06047d666877ca37bc16b6bf2e265c02a63"} Dec 05 11:12:55 crc kubenswrapper[4728]: I1205 11:12:55.241144 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 05 11:12:55 crc kubenswrapper[4728]: I1205 11:12:55.762787 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 05 11:12:57 crc kubenswrapper[4728]: I1205 11:12:57.268706 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 05 11:12:57 crc kubenswrapper[4728]: I1205 11:12:57.400481 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 05 11:12:57 crc kubenswrapper[4728]: I1205 11:12:57.858042 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 05 11:12:57 crc kubenswrapper[4728]: I1205 11:12:57.984045 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 05 11:12:58 crc kubenswrapper[4728]: I1205 11:12:58.038475 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 11:12:58 crc kubenswrapper[4728]: I1205 11:12:58.945986 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 05 11:13:00 crc kubenswrapper[4728]: I1205 11:13:00.794662 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 05 11:13:00 crc kubenswrapper[4728]: I1205 11:13:00.910844 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 05 11:13:01 crc kubenswrapper[4728]: I1205 11:13:01.268541 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 11:13:01 crc kubenswrapper[4728]: I1205 11:13:01.272569 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 11:13:01 crc kubenswrapper[4728]: I1205 11:13:01.285746 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 05 11:13:02 crc kubenswrapper[4728]: I1205 11:13:02.890919 4728 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 05 11:13:03 crc kubenswrapper[4728]: I1205 11:13:03.978432 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 05 11:13:04 crc kubenswrapper[4728]: I1205 11:13:04.500829 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 05 11:13:05 crc kubenswrapper[4728]: I1205 11:13:05.405851 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 05 11:13:07 crc kubenswrapper[4728]: I1205 11:13:07.214884 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 05 11:13:07 crc kubenswrapper[4728]: I1205 11:13:07.550443 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 05 11:13:08 crc kubenswrapper[4728]: I1205 11:13:08.044006 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 11:13:08 crc kubenswrapper[4728]: I1205 11:13:08.297050 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 05 11:13:09 crc kubenswrapper[4728]: I1205 11:13:09.134723 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 05 11:13:11 crc kubenswrapper[4728]: I1205 11:13:11.351719 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-qnmf2"] Dec 05 11:13:11 crc kubenswrapper[4728]: I1205 11:13:11.352219 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qnmf2" podUID="991dbe07-304e-4bd9-9aca-2b29134cc869" containerName="route-controller-manager" containerID="cri-o://c6e728070843e73bbe7f9c6d28fc805ab68b553575998435e221af8d30b7c317" gracePeriod=30 Dec 05 11:13:11 crc kubenswrapper[4728]: I1205 11:13:11.449183 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-52pf7"] Dec 05 11:13:11 crc kubenswrapper[4728]: I1205 11:13:11.449783 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-52pf7" podUID="049f4ab0-4d3e-45ab-b390-e4c80a919880" containerName="controller-manager" containerID="cri-o://8324ca88cc1607deceff1034311a2ecb0bf902f2b8e7606e865bddfcdc933c5d" gracePeriod=30 Dec 05 11:13:11 crc kubenswrapper[4728]: I1205 11:13:11.473180 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb"] Dec 05 11:13:11 crc kubenswrapper[4728]: I1205 11:13:11.835747 4728 generic.go:334] "Generic (PLEG): container finished" podID="049f4ab0-4d3e-45ab-b390-e4c80a919880" containerID="8324ca88cc1607deceff1034311a2ecb0bf902f2b8e7606e865bddfcdc933c5d" exitCode=0 Dec 05 11:13:11 crc kubenswrapper[4728]: I1205 11:13:11.835853 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-52pf7" 
event={"ID":"049f4ab0-4d3e-45ab-b390-e4c80a919880","Type":"ContainerDied","Data":"8324ca88cc1607deceff1034311a2ecb0bf902f2b8e7606e865bddfcdc933c5d"} Dec 05 11:13:11 crc kubenswrapper[4728]: I1205 11:13:11.843927 4728 generic.go:334] "Generic (PLEG): container finished" podID="991dbe07-304e-4bd9-9aca-2b29134cc869" containerID="c6e728070843e73bbe7f9c6d28fc805ab68b553575998435e221af8d30b7c317" exitCode=0 Dec 05 11:13:11 crc kubenswrapper[4728]: I1205 11:13:11.843981 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qnmf2" event={"ID":"991dbe07-304e-4bd9-9aca-2b29134cc869","Type":"ContainerDied","Data":"c6e728070843e73bbe7f9c6d28fc805ab68b553575998435e221af8d30b7c317"} Dec 05 11:13:11 crc kubenswrapper[4728]: I1205 11:13:11.844014 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qnmf2" event={"ID":"991dbe07-304e-4bd9-9aca-2b29134cc869","Type":"ContainerDied","Data":"d0fafe5d47a1c4246fd179f18229a59670855b13d92708ec1358e21975026ce2"} Dec 05 11:13:11 crc kubenswrapper[4728]: I1205 11:13:11.844027 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d0fafe5d47a1c4246fd179f18229a59670855b13d92708ec1358e21975026ce2" Dec 05 11:13:11 crc kubenswrapper[4728]: I1205 11:13:11.868277 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qnmf2" Dec 05 11:13:11 crc kubenswrapper[4728]: I1205 11:13:11.872782 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-52pf7" Dec 05 11:13:11 crc kubenswrapper[4728]: I1205 11:13:11.909924 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/049f4ab0-4d3e-45ab-b390-e4c80a919880-proxy-ca-bundles\") pod \"049f4ab0-4d3e-45ab-b390-e4c80a919880\" (UID: \"049f4ab0-4d3e-45ab-b390-e4c80a919880\") " Dec 05 11:13:11 crc kubenswrapper[4728]: I1205 11:13:11.910195 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2dw9n\" (UniqueName: \"kubernetes.io/projected/991dbe07-304e-4bd9-9aca-2b29134cc869-kube-api-access-2dw9n\") pod \"991dbe07-304e-4bd9-9aca-2b29134cc869\" (UID: \"991dbe07-304e-4bd9-9aca-2b29134cc869\") " Dec 05 11:13:11 crc kubenswrapper[4728]: I1205 11:13:11.910229 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d56hs\" (UniqueName: \"kubernetes.io/projected/049f4ab0-4d3e-45ab-b390-e4c80a919880-kube-api-access-d56hs\") pod \"049f4ab0-4d3e-45ab-b390-e4c80a919880\" (UID: \"049f4ab0-4d3e-45ab-b390-e4c80a919880\") " Dec 05 11:13:11 crc kubenswrapper[4728]: I1205 11:13:11.910275 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/049f4ab0-4d3e-45ab-b390-e4c80a919880-serving-cert\") pod \"049f4ab0-4d3e-45ab-b390-e4c80a919880\" (UID: \"049f4ab0-4d3e-45ab-b390-e4c80a919880\") " Dec 05 11:13:11 crc kubenswrapper[4728]: I1205 11:13:11.910292 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/991dbe07-304e-4bd9-9aca-2b29134cc869-serving-cert\") pod \"991dbe07-304e-4bd9-9aca-2b29134cc869\" (UID: \"991dbe07-304e-4bd9-9aca-2b29134cc869\") " 
Dec 05 11:13:11 crc kubenswrapper[4728]: I1205 11:13:11.910324 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/049f4ab0-4d3e-45ab-b390-e4c80a919880-client-ca\") pod \"049f4ab0-4d3e-45ab-b390-e4c80a919880\" (UID: \"049f4ab0-4d3e-45ab-b390-e4c80a919880\") " Dec 05 11:13:11 crc kubenswrapper[4728]: I1205 11:13:11.910345 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/991dbe07-304e-4bd9-9aca-2b29134cc869-client-ca\") pod \"991dbe07-304e-4bd9-9aca-2b29134cc869\" (UID: \"991dbe07-304e-4bd9-9aca-2b29134cc869\") " Dec 05 11:13:11 crc kubenswrapper[4728]: I1205 11:13:11.910363 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/991dbe07-304e-4bd9-9aca-2b29134cc869-config\") pod \"991dbe07-304e-4bd9-9aca-2b29134cc869\" (UID: \"991dbe07-304e-4bd9-9aca-2b29134cc869\") " Dec 05 11:13:11 crc kubenswrapper[4728]: I1205 11:13:11.910380 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/049f4ab0-4d3e-45ab-b390-e4c80a919880-config\") pod \"049f4ab0-4d3e-45ab-b390-e4c80a919880\" (UID: \"049f4ab0-4d3e-45ab-b390-e4c80a919880\") " Dec 05 11:13:11 crc kubenswrapper[4728]: I1205 11:13:11.911171 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/991dbe07-304e-4bd9-9aca-2b29134cc869-client-ca" (OuterVolumeSpecName: "client-ca") pod "991dbe07-304e-4bd9-9aca-2b29134cc869" (UID: "991dbe07-304e-4bd9-9aca-2b29134cc869"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:13:11 crc kubenswrapper[4728]: I1205 11:13:11.911550 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/049f4ab0-4d3e-45ab-b390-e4c80a919880-client-ca" (OuterVolumeSpecName: "client-ca") pod "049f4ab0-4d3e-45ab-b390-e4c80a919880" (UID: "049f4ab0-4d3e-45ab-b390-e4c80a919880"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:13:11 crc kubenswrapper[4728]: I1205 11:13:11.911693 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/049f4ab0-4d3e-45ab-b390-e4c80a919880-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "049f4ab0-4d3e-45ab-b390-e4c80a919880" (UID: "049f4ab0-4d3e-45ab-b390-e4c80a919880"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:13:11 crc kubenswrapper[4728]: I1205 11:13:11.911711 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/991dbe07-304e-4bd9-9aca-2b29134cc869-config" (OuterVolumeSpecName: "config") pod "991dbe07-304e-4bd9-9aca-2b29134cc869" (UID: "991dbe07-304e-4bd9-9aca-2b29134cc869"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:13:11 crc kubenswrapper[4728]: I1205 11:13:11.911803 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/049f4ab0-4d3e-45ab-b390-e4c80a919880-config" (OuterVolumeSpecName: "config") pod "049f4ab0-4d3e-45ab-b390-e4c80a919880" (UID: "049f4ab0-4d3e-45ab-b390-e4c80a919880"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:13:11 crc kubenswrapper[4728]: I1205 11:13:11.919294 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/049f4ab0-4d3e-45ab-b390-e4c80a919880-kube-api-access-d56hs" (OuterVolumeSpecName: "kube-api-access-d56hs") pod "049f4ab0-4d3e-45ab-b390-e4c80a919880" (UID: "049f4ab0-4d3e-45ab-b390-e4c80a919880"). InnerVolumeSpecName "kube-api-access-d56hs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:13:11 crc kubenswrapper[4728]: I1205 11:13:11.919852 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/991dbe07-304e-4bd9-9aca-2b29134cc869-kube-api-access-2dw9n" (OuterVolumeSpecName: "kube-api-access-2dw9n") pod "991dbe07-304e-4bd9-9aca-2b29134cc869" (UID: "991dbe07-304e-4bd9-9aca-2b29134cc869"). InnerVolumeSpecName "kube-api-access-2dw9n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:13:11 crc kubenswrapper[4728]: I1205 11:13:11.919954 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/049f4ab0-4d3e-45ab-b390-e4c80a919880-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "049f4ab0-4d3e-45ab-b390-e4c80a919880" (UID: "049f4ab0-4d3e-45ab-b390-e4c80a919880"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:13:11 crc kubenswrapper[4728]: I1205 11:13:11.925329 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/991dbe07-304e-4bd9-9aca-2b29134cc869-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "991dbe07-304e-4bd9-9aca-2b29134cc869" (UID: "991dbe07-304e-4bd9-9aca-2b29134cc869"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:13:12 crc kubenswrapper[4728]: I1205 11:13:12.011685 4728 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/991dbe07-304e-4bd9-9aca-2b29134cc869-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:13:12 crc kubenswrapper[4728]: I1205 11:13:12.011736 4728 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/049f4ab0-4d3e-45ab-b390-e4c80a919880-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:13:12 crc kubenswrapper[4728]: I1205 11:13:12.011754 4728 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/049f4ab0-4d3e-45ab-b390-e4c80a919880-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 05 11:13:12 crc kubenswrapper[4728]: I1205 11:13:12.011771 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2dw9n\" (UniqueName: \"kubernetes.io/projected/991dbe07-304e-4bd9-9aca-2b29134cc869-kube-api-access-2dw9n\") on node \"crc\" DevicePath \"\"" Dec 05 11:13:12 crc kubenswrapper[4728]: I1205 11:13:12.011784 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d56hs\" (UniqueName: \"kubernetes.io/projected/049f4ab0-4d3e-45ab-b390-e4c80a919880-kube-api-access-d56hs\") on node \"crc\" DevicePath \"\"" Dec 05 11:13:12 crc kubenswrapper[4728]: I1205 11:13:12.011817 4728 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/049f4ab0-4d3e-45ab-b390-e4c80a919880-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 11:13:12 crc kubenswrapper[4728]: I1205 11:13:12.011829 4728 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/991dbe07-304e-4bd9-9aca-2b29134cc869-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 11:13:12 crc kubenswrapper[4728]: I1205 11:13:12.011850 4728 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/049f4ab0-4d3e-45ab-b390-e4c80a919880-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 11:13:12 crc kubenswrapper[4728]: I1205 11:13:12.011862 4728 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/991dbe07-304e-4bd9-9aca-2b29134cc869-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 11:13:12 crc kubenswrapper[4728]: I1205 11:13:12.865615 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-52pf7" Dec 05 11:13:12 crc kubenswrapper[4728]: I1205 11:13:12.865550 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qnmf2" Dec 05 11:13:12 crc kubenswrapper[4728]: I1205 11:13:12.865763 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-52pf7" event={"ID":"049f4ab0-4d3e-45ab-b390-e4c80a919880","Type":"ContainerDied","Data":"3b37797406c86e17bea17cf0210aed8f2becda7e3187b0f9ed4d56094ef98d77"} Dec 05 11:13:12 crc kubenswrapper[4728]: I1205 11:13:12.865845 4728 scope.go:117] "RemoveContainer" containerID="8324ca88cc1607deceff1034311a2ecb0bf902f2b8e7606e865bddfcdc933c5d" Dec 05 11:13:12 crc kubenswrapper[4728]: I1205 11:13:12.893053 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-52pf7"] Dec 05 11:13:12 crc kubenswrapper[4728]: I1205 11:13:12.903760 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-52pf7"] Dec 05 11:13:12 crc kubenswrapper[4728]: I1205 11:13:12.907738 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-qnmf2"] Dec 05 11:13:12 crc kubenswrapper[4728]: I1205 11:13:12.910987 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-qnmf2"] Dec 05 11:13:13 crc kubenswrapper[4728]: I1205 11:13:13.050267 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-8448b4d7-6wn6m"] Dec 05 11:13:13 crc kubenswrapper[4728]: E1205 11:13:13.050528 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="049f4ab0-4d3e-45ab-b390-e4c80a919880" containerName="controller-manager" Dec 05 11:13:13 crc kubenswrapper[4728]: I1205 11:13:13.050542 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="049f4ab0-4d3e-45ab-b390-e4c80a919880" containerName="controller-manager" Dec 05 11:13:13 crc kubenswrapper[4728]: E1205 11:13:13.050562 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="991dbe07-304e-4bd9-9aca-2b29134cc869" containerName="route-controller-manager" Dec 05 11:13:13 crc kubenswrapper[4728]: I1205 11:13:13.050568 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="991dbe07-304e-4bd9-9aca-2b29134cc869" containerName="route-controller-manager" Dec 05 11:13:13 crc kubenswrapper[4728]: I1205 11:13:13.050653 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="991dbe07-304e-4bd9-9aca-2b29134cc869" containerName="route-controller-manager" Dec 05 11:13:13 crc kubenswrapper[4728]: I1205 11:13:13.050671 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="049f4ab0-4d3e-45ab-b390-e4c80a919880" containerName="controller-manager" Dec 05 11:13:13 crc kubenswrapper[4728]: I1205 11:13:13.051089 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-8448b4d7-6wn6m" Dec 05 11:13:13 crc kubenswrapper[4728]: I1205 11:13:13.052776 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 05 11:13:13 crc kubenswrapper[4728]: I1205 11:13:13.052834 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 05 11:13:13 crc kubenswrapper[4728]: I1205 11:13:13.053291 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 05 11:13:13 crc kubenswrapper[4728]: I1205 11:13:13.053363 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 05 11:13:13 crc kubenswrapper[4728]: I1205 11:13:13.053682 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 05 11:13:13 crc kubenswrapper[4728]: I1205 11:13:13.053935 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 05 11:13:13 crc kubenswrapper[4728]: I1205 11:13:13.054922 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-5d6857d78f-6tkwb"] Dec 05 11:13:13 crc kubenswrapper[4728]: I1205 11:13:13.055732 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5d6857d78f-6tkwb" Dec 05 11:13:13 crc kubenswrapper[4728]: I1205 11:13:13.057812 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 05 11:13:13 crc kubenswrapper[4728]: I1205 11:13:13.058046 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 05 11:13:13 crc kubenswrapper[4728]: I1205 11:13:13.058220 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 05 11:13:13 crc kubenswrapper[4728]: I1205 11:13:13.058325 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 05 11:13:13 crc kubenswrapper[4728]: I1205 11:13:13.058392 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 05 11:13:13 crc kubenswrapper[4728]: I1205 11:13:13.059930 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 05 11:13:13 crc kubenswrapper[4728]: I1205 11:13:13.064244 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 05 11:13:13 crc kubenswrapper[4728]: I1205 11:13:13.069305 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5d6857d78f-6tkwb"] Dec 05 11:13:13 crc kubenswrapper[4728]: I1205 11:13:13.073738 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-8448b4d7-6wn6m"] Dec 05 11:13:13 crc kubenswrapper[4728]: I1205 11:13:13.124324 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/75b54223-d1b1-4c8f-a9ab-64415c367f28-client-ca\") pod \"route-controller-manager-8448b4d7-6wn6m\" (UID: \"75b54223-d1b1-4c8f-a9ab-64415c367f28\") " pod="openshift-route-controller-manager/route-controller-manager-8448b4d7-6wn6m" Dec 05 11:13:13 crc kubenswrapper[4728]: I1205 11:13:13.124450 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/caed5a61-c1f7-4c88-a353-2561d8f1f8d3-proxy-ca-bundles\") pod \"controller-manager-5d6857d78f-6tkwb\" (UID: \"caed5a61-c1f7-4c88-a353-2561d8f1f8d3\") " pod="openshift-controller-manager/controller-manager-5d6857d78f-6tkwb" Dec 05 11:13:13 crc kubenswrapper[4728]: I1205 11:13:13.124474 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4p5z5\" (UniqueName: \"kubernetes.io/projected/caed5a61-c1f7-4c88-a353-2561d8f1f8d3-kube-api-access-4p5z5\") pod \"controller-manager-5d6857d78f-6tkwb\" (UID: \"caed5a61-c1f7-4c88-a353-2561d8f1f8d3\") " pod="openshift-controller-manager/controller-manager-5d6857d78f-6tkwb" Dec 05 11:13:13 crc kubenswrapper[4728]: I1205 11:13:13.124503 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/caed5a61-c1f7-4c88-a353-2561d8f1f8d3-serving-cert\") pod \"controller-manager-5d6857d78f-6tkwb\" (UID: \"caed5a61-c1f7-4c88-a353-2561d8f1f8d3\") " pod="openshift-controller-manager/controller-manager-5d6857d78f-6tkwb" Dec 05 11:13:13 crc kubenswrapper[4728]: I1205 11:13:13.124521 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/75b54223-d1b1-4c8f-a9ab-64415c367f28-serving-cert\") pod \"route-controller-manager-8448b4d7-6wn6m\" (UID: \"75b54223-d1b1-4c8f-a9ab-64415c367f28\") " pod="openshift-route-controller-manager/route-controller-manager-8448b4d7-6wn6m" Dec 05 11:13:13 crc kubenswrapper[4728]: I1205 11:13:13.124540 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sthjm\" (UniqueName: \"kubernetes.io/projected/75b54223-d1b1-4c8f-a9ab-64415c367f28-kube-api-access-sthjm\") pod \"route-controller-manager-8448b4d7-6wn6m\" (UID: \"75b54223-d1b1-4c8f-a9ab-64415c367f28\") " pod="openshift-route-controller-manager/route-controller-manager-8448b4d7-6wn6m" Dec 05 11:13:13 crc kubenswrapper[4728]: I1205 11:13:13.124683 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/caed5a61-c1f7-4c88-a353-2561d8f1f8d3-config\") pod \"controller-manager-5d6857d78f-6tkwb\" (UID: \"caed5a61-c1f7-4c88-a353-2561d8f1f8d3\") " pod="openshift-controller-manager/controller-manager-5d6857d78f-6tkwb" Dec 05 11:13:13 crc kubenswrapper[4728]: I1205 11:13:13.124766 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/75b54223-d1b1-4c8f-a9ab-64415c367f28-config\") pod \"route-controller-manager-8448b4d7-6wn6m\" (UID: \"75b54223-d1b1-4c8f-a9ab-64415c367f28\") " pod="openshift-route-controller-manager/route-controller-manager-8448b4d7-6wn6m" Dec 05 11:13:13 crc kubenswrapper[4728]: I1205 11:13:13.124815 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" 
(UniqueName: \"kubernetes.io/configmap/caed5a61-c1f7-4c88-a353-2561d8f1f8d3-client-ca\") pod \"controller-manager-5d6857d78f-6tkwb\" (UID: \"caed5a61-c1f7-4c88-a353-2561d8f1f8d3\") " pod="openshift-controller-manager/controller-manager-5d6857d78f-6tkwb" Dec 05 11:13:13 crc kubenswrapper[4728]: I1205 11:13:13.225486 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/caed5a61-c1f7-4c88-a353-2561d8f1f8d3-config\") pod \"controller-manager-5d6857d78f-6tkwb\" (UID: \"caed5a61-c1f7-4c88-a353-2561d8f1f8d3\") " pod="openshift-controller-manager/controller-manager-5d6857d78f-6tkwb" Dec 05 11:13:13 crc kubenswrapper[4728]: I1205 11:13:13.225568 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/caed5a61-c1f7-4c88-a353-2561d8f1f8d3-client-ca\") pod \"controller-manager-5d6857d78f-6tkwb\" (UID: \"caed5a61-c1f7-4c88-a353-2561d8f1f8d3\") " pod="openshift-controller-manager/controller-manager-5d6857d78f-6tkwb" Dec 05 11:13:13 crc kubenswrapper[4728]: I1205 11:13:13.225595 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/75b54223-d1b1-4c8f-a9ab-64415c367f28-config\") pod \"route-controller-manager-8448b4d7-6wn6m\" (UID: \"75b54223-d1b1-4c8f-a9ab-64415c367f28\") " pod="openshift-route-controller-manager/route-controller-manager-8448b4d7-6wn6m" Dec 05 11:13:13 crc kubenswrapper[4728]: I1205 11:13:13.225623 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/75b54223-d1b1-4c8f-a9ab-64415c367f28-client-ca\") pod \"route-controller-manager-8448b4d7-6wn6m\" (UID: \"75b54223-d1b1-4c8f-a9ab-64415c367f28\") " pod="openshift-route-controller-manager/route-controller-manager-8448b4d7-6wn6m" Dec 05 11:13:13 crc kubenswrapper[4728]: I1205 11:13:13.225662 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/caed5a61-c1f7-4c88-a353-2561d8f1f8d3-proxy-ca-bundles\") pod \"controller-manager-5d6857d78f-6tkwb\" (UID: \"caed5a61-c1f7-4c88-a353-2561d8f1f8d3\") " pod="openshift-controller-manager/controller-manager-5d6857d78f-6tkwb" Dec 05 11:13:13 crc kubenswrapper[4728]: I1205 11:13:13.225685 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4p5z5\" (UniqueName: \"kubernetes.io/projected/caed5a61-c1f7-4c88-a353-2561d8f1f8d3-kube-api-access-4p5z5\") pod \"controller-manager-5d6857d78f-6tkwb\" (UID: \"caed5a61-c1f7-4c88-a353-2561d8f1f8d3\") " pod="openshift-controller-manager/controller-manager-5d6857d78f-6tkwb" Dec 05 11:13:13 crc kubenswrapper[4728]: I1205 11:13:13.225713 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/caed5a61-c1f7-4c88-a353-2561d8f1f8d3-serving-cert\") pod \"controller-manager-5d6857d78f-6tkwb\" (UID: \"caed5a61-c1f7-4c88-a353-2561d8f1f8d3\") " pod="openshift-controller-manager/controller-manager-5d6857d78f-6tkwb" Dec 05 11:13:13 crc kubenswrapper[4728]: I1205 11:13:13.225734 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/75b54223-d1b1-4c8f-a9ab-64415c367f28-serving-cert\") pod \"route-controller-manager-8448b4d7-6wn6m\" (UID: \"75b54223-d1b1-4c8f-a9ab-64415c367f28\") " 
pod="openshift-route-controller-manager/route-controller-manager-8448b4d7-6wn6m" Dec 05 11:13:13 crc kubenswrapper[4728]: I1205 11:13:13.225758 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sthjm\" (UniqueName: \"kubernetes.io/projected/75b54223-d1b1-4c8f-a9ab-64415c367f28-kube-api-access-sthjm\") pod \"route-controller-manager-8448b4d7-6wn6m\" (UID: \"75b54223-d1b1-4c8f-a9ab-64415c367f28\") " pod="openshift-route-controller-manager/route-controller-manager-8448b4d7-6wn6m" Dec 05 11:13:13 crc kubenswrapper[4728]: I1205 11:13:13.227036 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/75b54223-d1b1-4c8f-a9ab-64415c367f28-client-ca\") pod \"route-controller-manager-8448b4d7-6wn6m\" (UID: \"75b54223-d1b1-4c8f-a9ab-64415c367f28\") " pod="openshift-route-controller-manager/route-controller-manager-8448b4d7-6wn6m" Dec 05 11:13:13 crc kubenswrapper[4728]: I1205 11:13:13.227193 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/75b54223-d1b1-4c8f-a9ab-64415c367f28-config\") pod \"route-controller-manager-8448b4d7-6wn6m\" (UID: \"75b54223-d1b1-4c8f-a9ab-64415c367f28\") " pod="openshift-route-controller-manager/route-controller-manager-8448b4d7-6wn6m" Dec 05 11:13:13 crc kubenswrapper[4728]: I1205 11:13:13.228011 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/caed5a61-c1f7-4c88-a353-2561d8f1f8d3-proxy-ca-bundles\") pod \"controller-manager-5d6857d78f-6tkwb\" (UID: \"caed5a61-c1f7-4c88-a353-2561d8f1f8d3\") " pod="openshift-controller-manager/controller-manager-5d6857d78f-6tkwb" Dec 05 11:13:13 crc kubenswrapper[4728]: I1205 11:13:13.228413 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/caed5a61-c1f7-4c88-a353-2561d8f1f8d3-config\") pod \"controller-manager-5d6857d78f-6tkwb\" (UID: \"caed5a61-c1f7-4c88-a353-2561d8f1f8d3\") " pod="openshift-controller-manager/controller-manager-5d6857d78f-6tkwb" Dec 05 11:13:13 crc kubenswrapper[4728]: I1205 11:13:13.229148 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/caed5a61-c1f7-4c88-a353-2561d8f1f8d3-client-ca\") pod \"controller-manager-5d6857d78f-6tkwb\" (UID: \"caed5a61-c1f7-4c88-a353-2561d8f1f8d3\") " pod="openshift-controller-manager/controller-manager-5d6857d78f-6tkwb" Dec 05 11:13:13 crc kubenswrapper[4728]: I1205 11:13:13.234471 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/75b54223-d1b1-4c8f-a9ab-64415c367f28-serving-cert\") pod \"route-controller-manager-8448b4d7-6wn6m\" (UID: \"75b54223-d1b1-4c8f-a9ab-64415c367f28\") " pod="openshift-route-controller-manager/route-controller-manager-8448b4d7-6wn6m" Dec 05 11:13:13 crc kubenswrapper[4728]: I1205 11:13:13.244840 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/caed5a61-c1f7-4c88-a353-2561d8f1f8d3-serving-cert\") pod \"controller-manager-5d6857d78f-6tkwb\" (UID: \"caed5a61-c1f7-4c88-a353-2561d8f1f8d3\") " pod="openshift-controller-manager/controller-manager-5d6857d78f-6tkwb" Dec 05 11:13:13 crc kubenswrapper[4728]: I1205 11:13:13.249290 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-4p5z5\" (UniqueName: \"kubernetes.io/projected/caed5a61-c1f7-4c88-a353-2561d8f1f8d3-kube-api-access-4p5z5\") pod \"controller-manager-5d6857d78f-6tkwb\" (UID: \"caed5a61-c1f7-4c88-a353-2561d8f1f8d3\") " pod="openshift-controller-manager/controller-manager-5d6857d78f-6tkwb" Dec 05 11:13:13 crc kubenswrapper[4728]: I1205 11:13:13.249362 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sthjm\" (UniqueName: \"kubernetes.io/projected/75b54223-d1b1-4c8f-a9ab-64415c367f28-kube-api-access-sthjm\") pod \"route-controller-manager-8448b4d7-6wn6m\" (UID: \"75b54223-d1b1-4c8f-a9ab-64415c367f28\") " pod="openshift-route-controller-manager/route-controller-manager-8448b4d7-6wn6m" Dec 05 11:13:13 crc kubenswrapper[4728]: I1205 11:13:13.369291 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-8448b4d7-6wn6m" Dec 05 11:13:13 crc kubenswrapper[4728]: I1205 11:13:13.383777 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5d6857d78f-6tkwb" Dec 05 11:13:13 crc kubenswrapper[4728]: I1205 11:13:13.605363 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5d6857d78f-6tkwb"] Dec 05 11:13:13 crc kubenswrapper[4728]: I1205 11:13:13.845391 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-8448b4d7-6wn6m"] Dec 05 11:13:13 crc kubenswrapper[4728]: W1205 11:13:13.853561 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod75b54223_d1b1_4c8f_a9ab_64415c367f28.slice/crio-684f56aa67c2a53f5dff4d75cf2fc5e66ca38df85232a73af03e2ced6b8a081c WatchSource:0}: Error finding container 684f56aa67c2a53f5dff4d75cf2fc5e66ca38df85232a73af03e2ced6b8a081c: Status 404 returned error can't find the container with id 684f56aa67c2a53f5dff4d75cf2fc5e66ca38df85232a73af03e2ced6b8a081c Dec 05 11:13:13 crc kubenswrapper[4728]: I1205 11:13:13.874077 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5d6857d78f-6tkwb" event={"ID":"caed5a61-c1f7-4c88-a353-2561d8f1f8d3","Type":"ContainerStarted","Data":"31c0b3c0e94c9ed90a0a6321d38ac42ef7cdec147f06696aaa061d036c765383"} Dec 05 11:13:13 crc kubenswrapper[4728]: I1205 11:13:13.875220 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-8448b4d7-6wn6m" event={"ID":"75b54223-d1b1-4c8f-a9ab-64415c367f28","Type":"ContainerStarted","Data":"684f56aa67c2a53f5dff4d75cf2fc5e66ca38df85232a73af03e2ced6b8a081c"} Dec 05 11:13:14 crc kubenswrapper[4728]: I1205 11:13:14.358016 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="049f4ab0-4d3e-45ab-b390-e4c80a919880" path="/var/lib/kubelet/pods/049f4ab0-4d3e-45ab-b390-e4c80a919880/volumes" Dec 05 11:13:14 crc kubenswrapper[4728]: I1205 11:13:14.358728 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="991dbe07-304e-4bd9-9aca-2b29134cc869" path="/var/lib/kubelet/pods/991dbe07-304e-4bd9-9aca-2b29134cc869/volumes" Dec 05 11:13:14 crc kubenswrapper[4728]: I1205 11:13:14.881925 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-8448b4d7-6wn6m" 
event={"ID":"75b54223-d1b1-4c8f-a9ab-64415c367f28","Type":"ContainerStarted","Data":"d0246e06a784a8a5b27a14b10847673de841f2703cd6b15d6143281e71a1f43e"} Dec 05 11:13:14 crc kubenswrapper[4728]: I1205 11:13:14.883063 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-8448b4d7-6wn6m" Dec 05 11:13:14 crc kubenswrapper[4728]: I1205 11:13:14.884187 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5d6857d78f-6tkwb" event={"ID":"caed5a61-c1f7-4c88-a353-2561d8f1f8d3","Type":"ContainerStarted","Data":"697ee2c56dc15db56ed657d4cfd16a99e7e7536ecdad97fdd42e2d86a0fec2a2"} Dec 05 11:13:14 crc kubenswrapper[4728]: I1205 11:13:14.884949 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-5d6857d78f-6tkwb" Dec 05 11:13:14 crc kubenswrapper[4728]: I1205 11:13:14.900273 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-5d6857d78f-6tkwb" Dec 05 11:13:14 crc kubenswrapper[4728]: I1205 11:13:14.910103 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-8448b4d7-6wn6m" podStartSLOduration=3.910079541 podStartE2EDuration="3.910079541s" podCreationTimestamp="2025-12-05 11:13:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:13:14.900892713 +0000 UTC m=+329.043015426" watchObservedRunningTime="2025-12-05 11:13:14.910079541 +0000 UTC m=+329.052202254" Dec 05 11:13:14 crc kubenswrapper[4728]: I1205 11:13:14.921316 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-5d6857d78f-6tkwb" podStartSLOduration=3.921295134 podStartE2EDuration="3.921295134s" podCreationTimestamp="2025-12-05 11:13:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:13:14.917736827 +0000 UTC m=+329.059859520" watchObservedRunningTime="2025-12-05 11:13:14.921295134 +0000 UTC m=+329.063417837" Dec 05 11:13:15 crc kubenswrapper[4728]: I1205 11:13:15.305479 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-8448b4d7-6wn6m" Dec 05 11:13:36 crc kubenswrapper[4728]: I1205 11:13:36.512112 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb" podUID="8ed41172-da91-43b9-8b5b-048a3c9e58e2" containerName="oauth-openshift" containerID="cri-o://8752af5082e78fec516e50941d85d69b6786439337e3958f00c6e51f4ef68eb7" gracePeriod=15 Dec 05 11:13:36 crc kubenswrapper[4728]: I1205 11:13:36.993727 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.007139 4728 generic.go:334] "Generic (PLEG): container finished" podID="8ed41172-da91-43b9-8b5b-048a3c9e58e2" containerID="8752af5082e78fec516e50941d85d69b6786439337e3958f00c6e51f4ef68eb7" exitCode=0 Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.007159 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.007178 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb" event={"ID":"8ed41172-da91-43b9-8b5b-048a3c9e58e2","Type":"ContainerDied","Data":"8752af5082e78fec516e50941d85d69b6786439337e3958f00c6e51f4ef68eb7"} Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.007225 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb" event={"ID":"8ed41172-da91-43b9-8b5b-048a3c9e58e2","Type":"ContainerDied","Data":"e77dd83fd51595bab94df959a5aee376a2b9babcda4e658791d409bf5be70708"} Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.007242 4728 scope.go:117] "RemoveContainer" containerID="8752af5082e78fec516e50941d85d69b6786439337e3958f00c6e51f4ef68eb7" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.026317 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-77548dbf6-hql59"] Dec 05 11:13:37 crc kubenswrapper[4728]: E1205 11:13:37.026549 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ed41172-da91-43b9-8b5b-048a3c9e58e2" containerName="oauth-openshift" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.026562 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ed41172-da91-43b9-8b5b-048a3c9e58e2" containerName="oauth-openshift" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.026687 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ed41172-da91-43b9-8b5b-048a3c9e58e2" containerName="oauth-openshift" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.027140 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-77548dbf6-hql59" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.037573 4728 scope.go:117] "RemoveContainer" containerID="8752af5082e78fec516e50941d85d69b6786439337e3958f00c6e51f4ef68eb7" Dec 05 11:13:37 crc kubenswrapper[4728]: E1205 11:13:37.038131 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8752af5082e78fec516e50941d85d69b6786439337e3958f00c6e51f4ef68eb7\": container with ID starting with 8752af5082e78fec516e50941d85d69b6786439337e3958f00c6e51f4ef68eb7 not found: ID does not exist" containerID="8752af5082e78fec516e50941d85d69b6786439337e3958f00c6e51f4ef68eb7" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.038158 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8752af5082e78fec516e50941d85d69b6786439337e3958f00c6e51f4ef68eb7"} err="failed to get container status \"8752af5082e78fec516e50941d85d69b6786439337e3958f00c6e51f4ef68eb7\": rpc error: code = NotFound desc = could not find container \"8752af5082e78fec516e50941d85d69b6786439337e3958f00c6e51f4ef68eb7\": container with ID starting with 8752af5082e78fec516e50941d85d69b6786439337e3958f00c6e51f4ef68eb7 not found: ID does not exist" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.073582 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-77548dbf6-hql59"] Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.167859 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r6ckv\" (UniqueName: \"kubernetes.io/projected/8ed41172-da91-43b9-8b5b-048a3c9e58e2-kube-api-access-r6ckv\") pod \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\" (UID: \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\") " Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.168151 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-system-service-ca\") pod \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\" (UID: \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\") " Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.168279 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-user-template-error\") pod \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\" (UID: \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\") " Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.168377 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-user-template-provider-selection\") pod \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\" (UID: \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\") " Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.168503 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-system-session\") pod \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\" (UID: \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\") " Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.168604 4728 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-system-trusted-ca-bundle\") pod \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\" (UID: \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\") " Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.168697 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-system-router-certs\") pod \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\" (UID: \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\") " Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.168809 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/8ed41172-da91-43b9-8b5b-048a3c9e58e2-audit-dir\") pod \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\" (UID: \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\") " Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.168909 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-system-cliconfig\") pod \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\" (UID: \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\") " Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.169459 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-system-ocp-branding-template\") pod \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\" (UID: \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\") " Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.169865 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/8ed41172-da91-43b9-8b5b-048a3c9e58e2-audit-policies\") pod \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\" (UID: \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\") " Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.169003 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8ed41172-da91-43b9-8b5b-048a3c9e58e2-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "8ed41172-da91-43b9-8b5b-048a3c9e58e2" (UID: "8ed41172-da91-43b9-8b5b-048a3c9e58e2"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.169032 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "8ed41172-da91-43b9-8b5b-048a3c9e58e2" (UID: "8ed41172-da91-43b9-8b5b-048a3c9e58e2"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.169088 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "8ed41172-da91-43b9-8b5b-048a3c9e58e2" (UID: "8ed41172-da91-43b9-8b5b-048a3c9e58e2"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.169275 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "8ed41172-da91-43b9-8b5b-048a3c9e58e2" (UID: "8ed41172-da91-43b9-8b5b-048a3c9e58e2"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.170036 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-user-idp-0-file-data\") pod \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\" (UID: \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\") " Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.170172 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-user-template-login\") pod \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\" (UID: \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\") " Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.170204 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8ed41172-da91-43b9-8b5b-048a3c9e58e2-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "8ed41172-da91-43b9-8b5b-048a3c9e58e2" (UID: "8ed41172-da91-43b9-8b5b-048a3c9e58e2"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.170210 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-system-serving-cert\") pod \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\" (UID: \"8ed41172-da91-43b9-8b5b-048a3c9e58e2\") " Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.170498 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6d34f9fa-bc9f-4496-ad31-f384698fe961-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-77548dbf6-hql59\" (UID: \"6d34f9fa-bc9f-4496-ad31-f384698fe961\") " pod="openshift-authentication/oauth-openshift-77548dbf6-hql59" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.170523 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/6d34f9fa-bc9f-4496-ad31-f384698fe961-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-77548dbf6-hql59\" (UID: \"6d34f9fa-bc9f-4496-ad31-f384698fe961\") " pod="openshift-authentication/oauth-openshift-77548dbf6-hql59" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.170636 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/6d34f9fa-bc9f-4496-ad31-f384698fe961-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-77548dbf6-hql59\" (UID: \"6d34f9fa-bc9f-4496-ad31-f384698fe961\") " 
pod="openshift-authentication/oauth-openshift-77548dbf6-hql59" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.170686 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/6d34f9fa-bc9f-4496-ad31-f384698fe961-v4-0-config-system-cliconfig\") pod \"oauth-openshift-77548dbf6-hql59\" (UID: \"6d34f9fa-bc9f-4496-ad31-f384698fe961\") " pod="openshift-authentication/oauth-openshift-77548dbf6-hql59" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.170713 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-78c95\" (UniqueName: \"kubernetes.io/projected/6d34f9fa-bc9f-4496-ad31-f384698fe961-kube-api-access-78c95\") pod \"oauth-openshift-77548dbf6-hql59\" (UID: \"6d34f9fa-bc9f-4496-ad31-f384698fe961\") " pod="openshift-authentication/oauth-openshift-77548dbf6-hql59" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.170780 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/6d34f9fa-bc9f-4496-ad31-f384698fe961-audit-policies\") pod \"oauth-openshift-77548dbf6-hql59\" (UID: \"6d34f9fa-bc9f-4496-ad31-f384698fe961\") " pod="openshift-authentication/oauth-openshift-77548dbf6-hql59" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.170826 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/6d34f9fa-bc9f-4496-ad31-f384698fe961-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-77548dbf6-hql59\" (UID: \"6d34f9fa-bc9f-4496-ad31-f384698fe961\") " pod="openshift-authentication/oauth-openshift-77548dbf6-hql59" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.170867 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/6d34f9fa-bc9f-4496-ad31-f384698fe961-v4-0-config-system-router-certs\") pod \"oauth-openshift-77548dbf6-hql59\" (UID: \"6d34f9fa-bc9f-4496-ad31-f384698fe961\") " pod="openshift-authentication/oauth-openshift-77548dbf6-hql59" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.170894 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6d34f9fa-bc9f-4496-ad31-f384698fe961-audit-dir\") pod \"oauth-openshift-77548dbf6-hql59\" (UID: \"6d34f9fa-bc9f-4496-ad31-f384698fe961\") " pod="openshift-authentication/oauth-openshift-77548dbf6-hql59" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.170974 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/6d34f9fa-bc9f-4496-ad31-f384698fe961-v4-0-config-system-session\") pod \"oauth-openshift-77548dbf6-hql59\" (UID: \"6d34f9fa-bc9f-4496-ad31-f384698fe961\") " pod="openshift-authentication/oauth-openshift-77548dbf6-hql59" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.171006 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/6d34f9fa-bc9f-4496-ad31-f384698fe961-v4-0-config-user-template-login\") pod \"oauth-openshift-77548dbf6-hql59\" (UID: 
\"6d34f9fa-bc9f-4496-ad31-f384698fe961\") " pod="openshift-authentication/oauth-openshift-77548dbf6-hql59" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.171031 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/6d34f9fa-bc9f-4496-ad31-f384698fe961-v4-0-config-system-service-ca\") pod \"oauth-openshift-77548dbf6-hql59\" (UID: \"6d34f9fa-bc9f-4496-ad31-f384698fe961\") " pod="openshift-authentication/oauth-openshift-77548dbf6-hql59" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.171097 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/6d34f9fa-bc9f-4496-ad31-f384698fe961-v4-0-config-system-serving-cert\") pod \"oauth-openshift-77548dbf6-hql59\" (UID: \"6d34f9fa-bc9f-4496-ad31-f384698fe961\") " pod="openshift-authentication/oauth-openshift-77548dbf6-hql59" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.171123 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/6d34f9fa-bc9f-4496-ad31-f384698fe961-v4-0-config-user-template-error\") pod \"oauth-openshift-77548dbf6-hql59\" (UID: \"6d34f9fa-bc9f-4496-ad31-f384698fe961\") " pod="openshift-authentication/oauth-openshift-77548dbf6-hql59" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.171170 4728 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/8ed41172-da91-43b9-8b5b-048a3c9e58e2-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.171199 4728 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.171215 4728 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.171228 4728 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/8ed41172-da91-43b9-8b5b-048a3c9e58e2-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.171239 4728 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.172802 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "8ed41172-da91-43b9-8b5b-048a3c9e58e2" (UID: "8ed41172-da91-43b9-8b5b-048a3c9e58e2"). InnerVolumeSpecName "v4-0-config-user-template-error". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.173222 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "8ed41172-da91-43b9-8b5b-048a3c9e58e2" (UID: "8ed41172-da91-43b9-8b5b-048a3c9e58e2"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.173341 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "8ed41172-da91-43b9-8b5b-048a3c9e58e2" (UID: "8ed41172-da91-43b9-8b5b-048a3c9e58e2"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.173454 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ed41172-da91-43b9-8b5b-048a3c9e58e2-kube-api-access-r6ckv" (OuterVolumeSpecName: "kube-api-access-r6ckv") pod "8ed41172-da91-43b9-8b5b-048a3c9e58e2" (UID: "8ed41172-da91-43b9-8b5b-048a3c9e58e2"). InnerVolumeSpecName "kube-api-access-r6ckv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.174169 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "8ed41172-da91-43b9-8b5b-048a3c9e58e2" (UID: "8ed41172-da91-43b9-8b5b-048a3c9e58e2"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.179906 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "8ed41172-da91-43b9-8b5b-048a3c9e58e2" (UID: "8ed41172-da91-43b9-8b5b-048a3c9e58e2"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.179916 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "8ed41172-da91-43b9-8b5b-048a3c9e58e2" (UID: "8ed41172-da91-43b9-8b5b-048a3c9e58e2"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.180389 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "8ed41172-da91-43b9-8b5b-048a3c9e58e2" (UID: "8ed41172-da91-43b9-8b5b-048a3c9e58e2"). InnerVolumeSpecName "v4-0-config-user-template-login". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.180610 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "8ed41172-da91-43b9-8b5b-048a3c9e58e2" (UID: "8ed41172-da91-43b9-8b5b-048a3c9e58e2"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.213693 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5wcjm"] Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.214445 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-5wcjm" podUID="09543444-057d-42b7-a103-6af978f7c627" containerName="registry-server" containerID="cri-o://6d54d6c6ae7f7130e883d94a95f02f2d4cfefdd8b05e5351294b5a58ef70afba" gracePeriod=2 Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.272471 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/6d34f9fa-bc9f-4496-ad31-f384698fe961-v4-0-config-system-router-certs\") pod \"oauth-openshift-77548dbf6-hql59\" (UID: \"6d34f9fa-bc9f-4496-ad31-f384698fe961\") " pod="openshift-authentication/oauth-openshift-77548dbf6-hql59" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.272538 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6d34f9fa-bc9f-4496-ad31-f384698fe961-audit-dir\") pod \"oauth-openshift-77548dbf6-hql59\" (UID: \"6d34f9fa-bc9f-4496-ad31-f384698fe961\") " pod="openshift-authentication/oauth-openshift-77548dbf6-hql59" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.272612 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/6d34f9fa-bc9f-4496-ad31-f384698fe961-v4-0-config-system-session\") pod \"oauth-openshift-77548dbf6-hql59\" (UID: \"6d34f9fa-bc9f-4496-ad31-f384698fe961\") " pod="openshift-authentication/oauth-openshift-77548dbf6-hql59" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.272634 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/6d34f9fa-bc9f-4496-ad31-f384698fe961-v4-0-config-user-template-login\") pod \"oauth-openshift-77548dbf6-hql59\" (UID: \"6d34f9fa-bc9f-4496-ad31-f384698fe961\") " pod="openshift-authentication/oauth-openshift-77548dbf6-hql59" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.272650 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/6d34f9fa-bc9f-4496-ad31-f384698fe961-v4-0-config-system-service-ca\") pod \"oauth-openshift-77548dbf6-hql59\" (UID: \"6d34f9fa-bc9f-4496-ad31-f384698fe961\") " pod="openshift-authentication/oauth-openshift-77548dbf6-hql59" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.272695 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/6d34f9fa-bc9f-4496-ad31-f384698fe961-v4-0-config-system-serving-cert\") pod 
\"oauth-openshift-77548dbf6-hql59\" (UID: \"6d34f9fa-bc9f-4496-ad31-f384698fe961\") " pod="openshift-authentication/oauth-openshift-77548dbf6-hql59" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.272713 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/6d34f9fa-bc9f-4496-ad31-f384698fe961-v4-0-config-user-template-error\") pod \"oauth-openshift-77548dbf6-hql59\" (UID: \"6d34f9fa-bc9f-4496-ad31-f384698fe961\") " pod="openshift-authentication/oauth-openshift-77548dbf6-hql59" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.272737 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6d34f9fa-bc9f-4496-ad31-f384698fe961-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-77548dbf6-hql59\" (UID: \"6d34f9fa-bc9f-4496-ad31-f384698fe961\") " pod="openshift-authentication/oauth-openshift-77548dbf6-hql59" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.272775 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/6d34f9fa-bc9f-4496-ad31-f384698fe961-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-77548dbf6-hql59\" (UID: \"6d34f9fa-bc9f-4496-ad31-f384698fe961\") " pod="openshift-authentication/oauth-openshift-77548dbf6-hql59" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.272823 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/6d34f9fa-bc9f-4496-ad31-f384698fe961-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-77548dbf6-hql59\" (UID: \"6d34f9fa-bc9f-4496-ad31-f384698fe961\") " pod="openshift-authentication/oauth-openshift-77548dbf6-hql59" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.272852 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/6d34f9fa-bc9f-4496-ad31-f384698fe961-v4-0-config-system-cliconfig\") pod \"oauth-openshift-77548dbf6-hql59\" (UID: \"6d34f9fa-bc9f-4496-ad31-f384698fe961\") " pod="openshift-authentication/oauth-openshift-77548dbf6-hql59" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.272870 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-78c95\" (UniqueName: \"kubernetes.io/projected/6d34f9fa-bc9f-4496-ad31-f384698fe961-kube-api-access-78c95\") pod \"oauth-openshift-77548dbf6-hql59\" (UID: \"6d34f9fa-bc9f-4496-ad31-f384698fe961\") " pod="openshift-authentication/oauth-openshift-77548dbf6-hql59" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.272909 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/6d34f9fa-bc9f-4496-ad31-f384698fe961-audit-policies\") pod \"oauth-openshift-77548dbf6-hql59\" (UID: \"6d34f9fa-bc9f-4496-ad31-f384698fe961\") " pod="openshift-authentication/oauth-openshift-77548dbf6-hql59" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.272930 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/6d34f9fa-bc9f-4496-ad31-f384698fe961-v4-0-config-system-ocp-branding-template\") pod 
\"oauth-openshift-77548dbf6-hql59\" (UID: \"6d34f9fa-bc9f-4496-ad31-f384698fe961\") " pod="openshift-authentication/oauth-openshift-77548dbf6-hql59" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.273003 4728 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.273017 4728 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.273028 4728 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.273037 4728 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.273065 4728 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.273076 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r6ckv\" (UniqueName: \"kubernetes.io/projected/8ed41172-da91-43b9-8b5b-048a3c9e58e2-kube-api-access-r6ckv\") on node \"crc\" DevicePath \"\"" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.273086 4728 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.273096 4728 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.273109 4728 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/8ed41172-da91-43b9-8b5b-048a3c9e58e2-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.273188 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6d34f9fa-bc9f-4496-ad31-f384698fe961-audit-dir\") pod \"oauth-openshift-77548dbf6-hql59\" (UID: \"6d34f9fa-bc9f-4496-ad31-f384698fe961\") " pod="openshift-authentication/oauth-openshift-77548dbf6-hql59" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.273481 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/6d34f9fa-bc9f-4496-ad31-f384698fe961-v4-0-config-system-service-ca\") pod 
\"oauth-openshift-77548dbf6-hql59\" (UID: \"6d34f9fa-bc9f-4496-ad31-f384698fe961\") " pod="openshift-authentication/oauth-openshift-77548dbf6-hql59" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.273838 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/6d34f9fa-bc9f-4496-ad31-f384698fe961-audit-policies\") pod \"oauth-openshift-77548dbf6-hql59\" (UID: \"6d34f9fa-bc9f-4496-ad31-f384698fe961\") " pod="openshift-authentication/oauth-openshift-77548dbf6-hql59" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.274019 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/6d34f9fa-bc9f-4496-ad31-f384698fe961-v4-0-config-system-cliconfig\") pod \"oauth-openshift-77548dbf6-hql59\" (UID: \"6d34f9fa-bc9f-4496-ad31-f384698fe961\") " pod="openshift-authentication/oauth-openshift-77548dbf6-hql59" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.274929 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6d34f9fa-bc9f-4496-ad31-f384698fe961-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-77548dbf6-hql59\" (UID: \"6d34f9fa-bc9f-4496-ad31-f384698fe961\") " pod="openshift-authentication/oauth-openshift-77548dbf6-hql59" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.277631 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/6d34f9fa-bc9f-4496-ad31-f384698fe961-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-77548dbf6-hql59\" (UID: \"6d34f9fa-bc9f-4496-ad31-f384698fe961\") " pod="openshift-authentication/oauth-openshift-77548dbf6-hql59" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.277764 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/6d34f9fa-bc9f-4496-ad31-f384698fe961-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-77548dbf6-hql59\" (UID: \"6d34f9fa-bc9f-4496-ad31-f384698fe961\") " pod="openshift-authentication/oauth-openshift-77548dbf6-hql59" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.277868 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/6d34f9fa-bc9f-4496-ad31-f384698fe961-v4-0-config-system-session\") pod \"oauth-openshift-77548dbf6-hql59\" (UID: \"6d34f9fa-bc9f-4496-ad31-f384698fe961\") " pod="openshift-authentication/oauth-openshift-77548dbf6-hql59" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.277876 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/6d34f9fa-bc9f-4496-ad31-f384698fe961-v4-0-config-user-template-error\") pod \"oauth-openshift-77548dbf6-hql59\" (UID: \"6d34f9fa-bc9f-4496-ad31-f384698fe961\") " pod="openshift-authentication/oauth-openshift-77548dbf6-hql59" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.278047 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/6d34f9fa-bc9f-4496-ad31-f384698fe961-v4-0-config-system-router-certs\") pod \"oauth-openshift-77548dbf6-hql59\" (UID: \"6d34f9fa-bc9f-4496-ad31-f384698fe961\") " 
pod="openshift-authentication/oauth-openshift-77548dbf6-hql59" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.278141 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/6d34f9fa-bc9f-4496-ad31-f384698fe961-v4-0-config-user-template-login\") pod \"oauth-openshift-77548dbf6-hql59\" (UID: \"6d34f9fa-bc9f-4496-ad31-f384698fe961\") " pod="openshift-authentication/oauth-openshift-77548dbf6-hql59" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.278345 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/6d34f9fa-bc9f-4496-ad31-f384698fe961-v4-0-config-system-serving-cert\") pod \"oauth-openshift-77548dbf6-hql59\" (UID: \"6d34f9fa-bc9f-4496-ad31-f384698fe961\") " pod="openshift-authentication/oauth-openshift-77548dbf6-hql59" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.279495 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/6d34f9fa-bc9f-4496-ad31-f384698fe961-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-77548dbf6-hql59\" (UID: \"6d34f9fa-bc9f-4496-ad31-f384698fe961\") " pod="openshift-authentication/oauth-openshift-77548dbf6-hql59" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.293060 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-78c95\" (UniqueName: \"kubernetes.io/projected/6d34f9fa-bc9f-4496-ad31-f384698fe961-kube-api-access-78c95\") pod \"oauth-openshift-77548dbf6-hql59\" (UID: \"6d34f9fa-bc9f-4496-ad31-f384698fe961\") " pod="openshift-authentication/oauth-openshift-77548dbf6-hql59" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.337401 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb"] Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.339854 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-679cb4ddc5-kr9nb"] Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.382498 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-77548dbf6-hql59" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.632460 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5wcjm" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.679189 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09543444-057d-42b7-a103-6af978f7c627-catalog-content\") pod \"09543444-057d-42b7-a103-6af978f7c627\" (UID: \"09543444-057d-42b7-a103-6af978f7c627\") " Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.679589 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09543444-057d-42b7-a103-6af978f7c627-utilities\") pod \"09543444-057d-42b7-a103-6af978f7c627\" (UID: \"09543444-057d-42b7-a103-6af978f7c627\") " Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.679692 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qswlx\" (UniqueName: \"kubernetes.io/projected/09543444-057d-42b7-a103-6af978f7c627-kube-api-access-qswlx\") pod \"09543444-057d-42b7-a103-6af978f7c627\" (UID: \"09543444-057d-42b7-a103-6af978f7c627\") " Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.680471 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/09543444-057d-42b7-a103-6af978f7c627-utilities" (OuterVolumeSpecName: "utilities") pod "09543444-057d-42b7-a103-6af978f7c627" (UID: "09543444-057d-42b7-a103-6af978f7c627"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.684451 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09543444-057d-42b7-a103-6af978f7c627-kube-api-access-qswlx" (OuterVolumeSpecName: "kube-api-access-qswlx") pod "09543444-057d-42b7-a103-6af978f7c627" (UID: "09543444-057d-42b7-a103-6af978f7c627"). InnerVolumeSpecName "kube-api-access-qswlx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.704485 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/09543444-057d-42b7-a103-6af978f7c627-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "09543444-057d-42b7-a103-6af978f7c627" (UID: "09543444-057d-42b7-a103-6af978f7c627"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.780745 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qswlx\" (UniqueName: \"kubernetes.io/projected/09543444-057d-42b7-a103-6af978f7c627-kube-api-access-qswlx\") on node \"crc\" DevicePath \"\"" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.780842 4728 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09543444-057d-42b7-a103-6af978f7c627-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.780859 4728 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09543444-057d-42b7-a103-6af978f7c627-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 11:13:37 crc kubenswrapper[4728]: I1205 11:13:37.801033 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-77548dbf6-hql59"] Dec 05 11:13:37 crc kubenswrapper[4728]: W1205 11:13:37.807640 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6d34f9fa_bc9f_4496_ad31_f384698fe961.slice/crio-20b3b4c410147951531de0d1b4dd24f3d30c83d57067d8eacfe69aa3ed55b074 WatchSource:0}: Error finding container 20b3b4c410147951531de0d1b4dd24f3d30c83d57067d8eacfe69aa3ed55b074: Status 404 returned error can't find the container with id 20b3b4c410147951531de0d1b4dd24f3d30c83d57067d8eacfe69aa3ed55b074 Dec 05 11:13:38 crc kubenswrapper[4728]: I1205 11:13:38.021842 4728 generic.go:334] "Generic (PLEG): container finished" podID="09543444-057d-42b7-a103-6af978f7c627" containerID="6d54d6c6ae7f7130e883d94a95f02f2d4cfefdd8b05e5351294b5a58ef70afba" exitCode=0 Dec 05 11:13:38 crc kubenswrapper[4728]: I1205 11:13:38.021894 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5wcjm" Dec 05 11:13:38 crc kubenswrapper[4728]: I1205 11:13:38.021949 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5wcjm" event={"ID":"09543444-057d-42b7-a103-6af978f7c627","Type":"ContainerDied","Data":"6d54d6c6ae7f7130e883d94a95f02f2d4cfefdd8b05e5351294b5a58ef70afba"} Dec 05 11:13:38 crc kubenswrapper[4728]: I1205 11:13:38.021976 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5wcjm" event={"ID":"09543444-057d-42b7-a103-6af978f7c627","Type":"ContainerDied","Data":"22ac0437160a1b150b0cf5a48ab1f7c6bf6b12f68c51b49c3f18e201f9a4e63a"} Dec 05 11:13:38 crc kubenswrapper[4728]: I1205 11:13:38.021994 4728 scope.go:117] "RemoveContainer" containerID="6d54d6c6ae7f7130e883d94a95f02f2d4cfefdd8b05e5351294b5a58ef70afba" Dec 05 11:13:38 crc kubenswrapper[4728]: I1205 11:13:38.025225 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-77548dbf6-hql59" event={"ID":"6d34f9fa-bc9f-4496-ad31-f384698fe961","Type":"ContainerStarted","Data":"20b3b4c410147951531de0d1b4dd24f3d30c83d57067d8eacfe69aa3ed55b074"} Dec 05 11:13:38 crc kubenswrapper[4728]: I1205 11:13:38.025434 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-77548dbf6-hql59" Dec 05 11:13:38 crc kubenswrapper[4728]: I1205 11:13:38.027321 4728 patch_prober.go:28] interesting pod/oauth-openshift-77548dbf6-hql59 container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.60:6443/healthz\": dial tcp 10.217.0.60:6443: connect: connection refused" start-of-body= Dec 05 11:13:38 crc kubenswrapper[4728]: I1205 11:13:38.027364 4728 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-77548dbf6-hql59" podUID="6d34f9fa-bc9f-4496-ad31-f384698fe961" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.60:6443/healthz\": dial tcp 10.217.0.60:6443: connect: connection refused" Dec 05 11:13:38 crc kubenswrapper[4728]: I1205 11:13:38.043287 4728 scope.go:117] "RemoveContainer" containerID="5e5f6be9a5fe43f0818733b84d42c0d13f975093d961dcfc5dd449015e11a0ea" Dec 05 11:13:38 crc kubenswrapper[4728]: I1205 11:13:38.054689 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-77548dbf6-hql59" podStartSLOduration=27.054659772 podStartE2EDuration="27.054659772s" podCreationTimestamp="2025-12-05 11:13:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:13:38.049985795 +0000 UTC m=+352.192108528" watchObservedRunningTime="2025-12-05 11:13:38.054659772 +0000 UTC m=+352.196782485" Dec 05 11:13:38 crc kubenswrapper[4728]: I1205 11:13:38.065133 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5wcjm"] Dec 05 11:13:38 crc kubenswrapper[4728]: I1205 11:13:38.068663 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-5wcjm"] Dec 05 11:13:38 crc kubenswrapper[4728]: I1205 11:13:38.072658 4728 scope.go:117] "RemoveContainer" containerID="1b866d9eb7cc957a27aa70c6f5f95dad61fcb91ef4390f13e66b30ce4e05b7d1" Dec 05 11:13:38 crc kubenswrapper[4728]: I1205 11:13:38.097813 4728 scope.go:117] "RemoveContainer" 
containerID="6d54d6c6ae7f7130e883d94a95f02f2d4cfefdd8b05e5351294b5a58ef70afba" Dec 05 11:13:38 crc kubenswrapper[4728]: E1205 11:13:38.099906 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6d54d6c6ae7f7130e883d94a95f02f2d4cfefdd8b05e5351294b5a58ef70afba\": container with ID starting with 6d54d6c6ae7f7130e883d94a95f02f2d4cfefdd8b05e5351294b5a58ef70afba not found: ID does not exist" containerID="6d54d6c6ae7f7130e883d94a95f02f2d4cfefdd8b05e5351294b5a58ef70afba" Dec 05 11:13:38 crc kubenswrapper[4728]: I1205 11:13:38.099947 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d54d6c6ae7f7130e883d94a95f02f2d4cfefdd8b05e5351294b5a58ef70afba"} err="failed to get container status \"6d54d6c6ae7f7130e883d94a95f02f2d4cfefdd8b05e5351294b5a58ef70afba\": rpc error: code = NotFound desc = could not find container \"6d54d6c6ae7f7130e883d94a95f02f2d4cfefdd8b05e5351294b5a58ef70afba\": container with ID starting with 6d54d6c6ae7f7130e883d94a95f02f2d4cfefdd8b05e5351294b5a58ef70afba not found: ID does not exist" Dec 05 11:13:38 crc kubenswrapper[4728]: I1205 11:13:38.099979 4728 scope.go:117] "RemoveContainer" containerID="5e5f6be9a5fe43f0818733b84d42c0d13f975093d961dcfc5dd449015e11a0ea" Dec 05 11:13:38 crc kubenswrapper[4728]: E1205 11:13:38.100401 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5e5f6be9a5fe43f0818733b84d42c0d13f975093d961dcfc5dd449015e11a0ea\": container with ID starting with 5e5f6be9a5fe43f0818733b84d42c0d13f975093d961dcfc5dd449015e11a0ea not found: ID does not exist" containerID="5e5f6be9a5fe43f0818733b84d42c0d13f975093d961dcfc5dd449015e11a0ea" Dec 05 11:13:38 crc kubenswrapper[4728]: I1205 11:13:38.100423 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e5f6be9a5fe43f0818733b84d42c0d13f975093d961dcfc5dd449015e11a0ea"} err="failed to get container status \"5e5f6be9a5fe43f0818733b84d42c0d13f975093d961dcfc5dd449015e11a0ea\": rpc error: code = NotFound desc = could not find container \"5e5f6be9a5fe43f0818733b84d42c0d13f975093d961dcfc5dd449015e11a0ea\": container with ID starting with 5e5f6be9a5fe43f0818733b84d42c0d13f975093d961dcfc5dd449015e11a0ea not found: ID does not exist" Dec 05 11:13:38 crc kubenswrapper[4728]: I1205 11:13:38.100437 4728 scope.go:117] "RemoveContainer" containerID="1b866d9eb7cc957a27aa70c6f5f95dad61fcb91ef4390f13e66b30ce4e05b7d1" Dec 05 11:13:38 crc kubenswrapper[4728]: E1205 11:13:38.100749 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1b866d9eb7cc957a27aa70c6f5f95dad61fcb91ef4390f13e66b30ce4e05b7d1\": container with ID starting with 1b866d9eb7cc957a27aa70c6f5f95dad61fcb91ef4390f13e66b30ce4e05b7d1 not found: ID does not exist" containerID="1b866d9eb7cc957a27aa70c6f5f95dad61fcb91ef4390f13e66b30ce4e05b7d1" Dec 05 11:13:38 crc kubenswrapper[4728]: I1205 11:13:38.100873 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1b866d9eb7cc957a27aa70c6f5f95dad61fcb91ef4390f13e66b30ce4e05b7d1"} err="failed to get container status \"1b866d9eb7cc957a27aa70c6f5f95dad61fcb91ef4390f13e66b30ce4e05b7d1\": rpc error: code = NotFound desc = could not find container \"1b866d9eb7cc957a27aa70c6f5f95dad61fcb91ef4390f13e66b30ce4e05b7d1\": container with ID starting with 
1b866d9eb7cc957a27aa70c6f5f95dad61fcb91ef4390f13e66b30ce4e05b7d1 not found: ID does not exist" Dec 05 11:13:38 crc kubenswrapper[4728]: I1205 11:13:38.359425 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09543444-057d-42b7-a103-6af978f7c627" path="/var/lib/kubelet/pods/09543444-057d-42b7-a103-6af978f7c627/volumes" Dec 05 11:13:38 crc kubenswrapper[4728]: I1205 11:13:38.360065 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ed41172-da91-43b9-8b5b-048a3c9e58e2" path="/var/lib/kubelet/pods/8ed41172-da91-43b9-8b5b-048a3c9e58e2/volumes" Dec 05 11:13:39 crc kubenswrapper[4728]: I1205 11:13:39.033434 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-77548dbf6-hql59" event={"ID":"6d34f9fa-bc9f-4496-ad31-f384698fe961","Type":"ContainerStarted","Data":"0d2181a5b5016a3924e85ae82fafacf3b6d14382275cf5516741f067527c8e38"} Dec 05 11:13:39 crc kubenswrapper[4728]: I1205 11:13:39.039235 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-77548dbf6-hql59" Dec 05 11:13:55 crc kubenswrapper[4728]: I1205 11:13:55.701909 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 11:13:55 crc kubenswrapper[4728]: I1205 11:13:55.702472 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 11:14:04 crc kubenswrapper[4728]: I1205 11:14:04.707372 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6zdxv"] Dec 05 11:14:04 crc kubenswrapper[4728]: I1205 11:14:04.708518 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-6zdxv" podUID="a443dace-dc6a-4488-a0d1-183a1198bd0d" containerName="registry-server" containerID="cri-o://89fe1afe709040bdb3354809a7e708dd765847d79bb13fa93ad41dee5e743088" gracePeriod=30 Dec 05 11:14:04 crc kubenswrapper[4728]: I1205 11:14:04.715149 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-m9cnn"] Dec 05 11:14:04 crc kubenswrapper[4728]: I1205 11:14:04.715419 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-m9cnn" podUID="48f4f7f9-a366-44ba-b8ee-f349da78fa76" containerName="registry-server" containerID="cri-o://b095182e2c599320984e84d99d1ea82a90c37eb1c4ae4bd4af432056e5be61c9" gracePeriod=30 Dec 05 11:14:04 crc kubenswrapper[4728]: I1205 11:14:04.733025 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-2clxj"] Dec 05 11:14:04 crc kubenswrapper[4728]: I1205 11:14:04.733309 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-2clxj" podUID="8c8f3b0b-edad-4a06-8374-2d2d3cd805d8" containerName="marketplace-operator" containerID="cri-o://8804388d269cf66039caca0903b822ad608b69cb6c7c3a54fd3ae6df8248bbed" gracePeriod=30 Dec 05 11:14:04 
crc kubenswrapper[4728]: I1205 11:14:04.740221 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mjf89"] Dec 05 11:14:04 crc kubenswrapper[4728]: I1205 11:14:04.740461 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-mjf89" podUID="09c93128-1454-446f-bb75-771442084d74" containerName="registry-server" containerID="cri-o://1ce3204e059fc718ca7e684da3b0529b22391a3c9958436f86ebb4f4f01d8cda" gracePeriod=30 Dec 05 11:14:04 crc kubenswrapper[4728]: I1205 11:14:04.749618 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tfbzt"] Dec 05 11:14:04 crc kubenswrapper[4728]: I1205 11:14:04.749849 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-tfbzt" podUID="29c0e6c0-78ea-4a37-aa57-22af46f50133" containerName="registry-server" containerID="cri-o://e77b1c412600e174eb403d781f0c0273cbbc7b072f51d8f573ced7354d5b724f" gracePeriod=30 Dec 05 11:14:04 crc kubenswrapper[4728]: I1205 11:14:04.762899 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-nxkrc"] Dec 05 11:14:04 crc kubenswrapper[4728]: E1205 11:14:04.763105 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09543444-057d-42b7-a103-6af978f7c627" containerName="registry-server" Dec 05 11:14:04 crc kubenswrapper[4728]: I1205 11:14:04.763118 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="09543444-057d-42b7-a103-6af978f7c627" containerName="registry-server" Dec 05 11:14:04 crc kubenswrapper[4728]: E1205 11:14:04.763134 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09543444-057d-42b7-a103-6af978f7c627" containerName="extract-content" Dec 05 11:14:04 crc kubenswrapper[4728]: I1205 11:14:04.763140 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="09543444-057d-42b7-a103-6af978f7c627" containerName="extract-content" Dec 05 11:14:04 crc kubenswrapper[4728]: E1205 11:14:04.763156 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09543444-057d-42b7-a103-6af978f7c627" containerName="extract-utilities" Dec 05 11:14:04 crc kubenswrapper[4728]: I1205 11:14:04.763162 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="09543444-057d-42b7-a103-6af978f7c627" containerName="extract-utilities" Dec 05 11:14:04 crc kubenswrapper[4728]: I1205 11:14:04.763255 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="09543444-057d-42b7-a103-6af978f7c627" containerName="registry-server" Dec 05 11:14:04 crc kubenswrapper[4728]: I1205 11:14:04.763612 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-nxkrc" Dec 05 11:14:04 crc kubenswrapper[4728]: I1205 11:14:04.787344 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-nxkrc"] Dec 05 11:14:04 crc kubenswrapper[4728]: I1205 11:14:04.805322 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/416c984c-f4c2-4b3f-8dd6-c27724ac7c42-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-nxkrc\" (UID: \"416c984c-f4c2-4b3f-8dd6-c27724ac7c42\") " pod="openshift-marketplace/marketplace-operator-79b997595-nxkrc" Dec 05 11:14:04 crc kubenswrapper[4728]: I1205 11:14:04.805381 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/416c984c-f4c2-4b3f-8dd6-c27724ac7c42-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-nxkrc\" (UID: \"416c984c-f4c2-4b3f-8dd6-c27724ac7c42\") " pod="openshift-marketplace/marketplace-operator-79b997595-nxkrc" Dec 05 11:14:04 crc kubenswrapper[4728]: I1205 11:14:04.805427 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bcghf\" (UniqueName: \"kubernetes.io/projected/416c984c-f4c2-4b3f-8dd6-c27724ac7c42-kube-api-access-bcghf\") pod \"marketplace-operator-79b997595-nxkrc\" (UID: \"416c984c-f4c2-4b3f-8dd6-c27724ac7c42\") " pod="openshift-marketplace/marketplace-operator-79b997595-nxkrc" Dec 05 11:14:04 crc kubenswrapper[4728]: I1205 11:14:04.906165 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bcghf\" (UniqueName: \"kubernetes.io/projected/416c984c-f4c2-4b3f-8dd6-c27724ac7c42-kube-api-access-bcghf\") pod \"marketplace-operator-79b997595-nxkrc\" (UID: \"416c984c-f4c2-4b3f-8dd6-c27724ac7c42\") " pod="openshift-marketplace/marketplace-operator-79b997595-nxkrc" Dec 05 11:14:04 crc kubenswrapper[4728]: I1205 11:14:04.906228 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/416c984c-f4c2-4b3f-8dd6-c27724ac7c42-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-nxkrc\" (UID: \"416c984c-f4c2-4b3f-8dd6-c27724ac7c42\") " pod="openshift-marketplace/marketplace-operator-79b997595-nxkrc" Dec 05 11:14:04 crc kubenswrapper[4728]: I1205 11:14:04.906264 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/416c984c-f4c2-4b3f-8dd6-c27724ac7c42-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-nxkrc\" (UID: \"416c984c-f4c2-4b3f-8dd6-c27724ac7c42\") " pod="openshift-marketplace/marketplace-operator-79b997595-nxkrc" Dec 05 11:14:04 crc kubenswrapper[4728]: I1205 11:14:04.907382 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/416c984c-f4c2-4b3f-8dd6-c27724ac7c42-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-nxkrc\" (UID: \"416c984c-f4c2-4b3f-8dd6-c27724ac7c42\") " pod="openshift-marketplace/marketplace-operator-79b997595-nxkrc" Dec 05 11:14:04 crc kubenswrapper[4728]: I1205 11:14:04.915951 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: 
\"kubernetes.io/secret/416c984c-f4c2-4b3f-8dd6-c27724ac7c42-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-nxkrc\" (UID: \"416c984c-f4c2-4b3f-8dd6-c27724ac7c42\") " pod="openshift-marketplace/marketplace-operator-79b997595-nxkrc" Dec 05 11:14:04 crc kubenswrapper[4728]: I1205 11:14:04.925601 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bcghf\" (UniqueName: \"kubernetes.io/projected/416c984c-f4c2-4b3f-8dd6-c27724ac7c42-kube-api-access-bcghf\") pod \"marketplace-operator-79b997595-nxkrc\" (UID: \"416c984c-f4c2-4b3f-8dd6-c27724ac7c42\") " pod="openshift-marketplace/marketplace-operator-79b997595-nxkrc" Dec 05 11:14:04 crc kubenswrapper[4728]: I1205 11:14:04.943265 4728 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-2clxj container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.36:8080/healthz\": dial tcp 10.217.0.36:8080: connect: connection refused" start-of-body= Dec 05 11:14:04 crc kubenswrapper[4728]: I1205 11:14:04.943320 4728 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-2clxj" podUID="8c8f3b0b-edad-4a06-8374-2d2d3cd805d8" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.36:8080/healthz\": dial tcp 10.217.0.36:8080: connect: connection refused" Dec 05 11:14:05 crc kubenswrapper[4728]: E1205 11:14:05.076587 4728 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of e77b1c412600e174eb403d781f0c0273cbbc7b072f51d8f573ced7354d5b724f is running failed: container process not found" containerID="e77b1c412600e174eb403d781f0c0273cbbc7b072f51d8f573ced7354d5b724f" cmd=["grpc_health_probe","-addr=:50051"] Dec 05 11:14:05 crc kubenswrapper[4728]: E1205 11:14:05.077672 4728 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of e77b1c412600e174eb403d781f0c0273cbbc7b072f51d8f573ced7354d5b724f is running failed: container process not found" containerID="e77b1c412600e174eb403d781f0c0273cbbc7b072f51d8f573ced7354d5b724f" cmd=["grpc_health_probe","-addr=:50051"] Dec 05 11:14:05 crc kubenswrapper[4728]: E1205 11:14:05.078438 4728 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of e77b1c412600e174eb403d781f0c0273cbbc7b072f51d8f573ced7354d5b724f is running failed: container process not found" containerID="e77b1c412600e174eb403d781f0c0273cbbc7b072f51d8f573ced7354d5b724f" cmd=["grpc_health_probe","-addr=:50051"] Dec 05 11:14:05 crc kubenswrapper[4728]: E1205 11:14:05.078490 4728 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of e77b1c412600e174eb403d781f0c0273cbbc7b072f51d8f573ced7354d5b724f is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-operators-tfbzt" podUID="29c0e6c0-78ea-4a37-aa57-22af46f50133" containerName="registry-server" Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.084576 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-nxkrc" Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.179636 4728 generic.go:334] "Generic (PLEG): container finished" podID="8c8f3b0b-edad-4a06-8374-2d2d3cd805d8" containerID="8804388d269cf66039caca0903b822ad608b69cb6c7c3a54fd3ae6df8248bbed" exitCode=0 Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.179712 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-2clxj" event={"ID":"8c8f3b0b-edad-4a06-8374-2d2d3cd805d8","Type":"ContainerDied","Data":"8804388d269cf66039caca0903b822ad608b69cb6c7c3a54fd3ae6df8248bbed"} Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.179761 4728 scope.go:117] "RemoveContainer" containerID="6eb7a8c4de7b91719bcde13603bacdcace249ece06ce508ae7a9dfc739264d6b" Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.183397 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6zdxv" Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.191894 4728 generic.go:334] "Generic (PLEG): container finished" podID="29c0e6c0-78ea-4a37-aa57-22af46f50133" containerID="e77b1c412600e174eb403d781f0c0273cbbc7b072f51d8f573ced7354d5b724f" exitCode=0 Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.192028 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tfbzt" event={"ID":"29c0e6c0-78ea-4a37-aa57-22af46f50133","Type":"ContainerDied","Data":"e77b1c412600e174eb403d781f0c0273cbbc7b072f51d8f573ced7354d5b724f"} Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.194435 4728 generic.go:334] "Generic (PLEG): container finished" podID="48f4f7f9-a366-44ba-b8ee-f349da78fa76" containerID="b095182e2c599320984e84d99d1ea82a90c37eb1c4ae4bd4af432056e5be61c9" exitCode=0 Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.194477 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-m9cnn" event={"ID":"48f4f7f9-a366-44ba-b8ee-f349da78fa76","Type":"ContainerDied","Data":"b095182e2c599320984e84d99d1ea82a90c37eb1c4ae4bd4af432056e5be61c9"} Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.203877 4728 generic.go:334] "Generic (PLEG): container finished" podID="09c93128-1454-446f-bb75-771442084d74" containerID="1ce3204e059fc718ca7e684da3b0529b22391a3c9958436f86ebb4f4f01d8cda" exitCode=0 Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.203964 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mjf89" event={"ID":"09c93128-1454-446f-bb75-771442084d74","Type":"ContainerDied","Data":"1ce3204e059fc718ca7e684da3b0529b22391a3c9958436f86ebb4f4f01d8cda"} Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.222487 4728 generic.go:334] "Generic (PLEG): container finished" podID="a443dace-dc6a-4488-a0d1-183a1198bd0d" containerID="89fe1afe709040bdb3354809a7e708dd765847d79bb13fa93ad41dee5e743088" exitCode=0 Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.222527 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6zdxv" event={"ID":"a443dace-dc6a-4488-a0d1-183a1198bd0d","Type":"ContainerDied","Data":"89fe1afe709040bdb3354809a7e708dd765847d79bb13fa93ad41dee5e743088"} Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.222563 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6zdxv" 
event={"ID":"a443dace-dc6a-4488-a0d1-183a1198bd0d","Type":"ContainerDied","Data":"ca9d0729086b023564a5ee831852f0e934b24c3c643ea8dacd7e6c47412f78e9"} Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.222624 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6zdxv" Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.266939 4728 scope.go:117] "RemoveContainer" containerID="89fe1afe709040bdb3354809a7e708dd765847d79bb13fa93ad41dee5e743088" Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.292820 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mjf89" Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.304420 4728 scope.go:117] "RemoveContainer" containerID="e921cc3b4ddad9044853732c69baa3a26011871e0ccf81fac8469fa173ac39b7" Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.311744 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a443dace-dc6a-4488-a0d1-183a1198bd0d-catalog-content\") pod \"a443dace-dc6a-4488-a0d1-183a1198bd0d\" (UID: \"a443dace-dc6a-4488-a0d1-183a1198bd0d\") " Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.311778 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a443dace-dc6a-4488-a0d1-183a1198bd0d-utilities\") pod \"a443dace-dc6a-4488-a0d1-183a1198bd0d\" (UID: \"a443dace-dc6a-4488-a0d1-183a1198bd0d\") " Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.311811 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m9f5s\" (UniqueName: \"kubernetes.io/projected/a443dace-dc6a-4488-a0d1-183a1198bd0d-kube-api-access-m9f5s\") pod \"a443dace-dc6a-4488-a0d1-183a1198bd0d\" (UID: \"a443dace-dc6a-4488-a0d1-183a1198bd0d\") " Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.313447 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a443dace-dc6a-4488-a0d1-183a1198bd0d-utilities" (OuterVolumeSpecName: "utilities") pod "a443dace-dc6a-4488-a0d1-183a1198bd0d" (UID: "a443dace-dc6a-4488-a0d1-183a1198bd0d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.317637 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a443dace-dc6a-4488-a0d1-183a1198bd0d-kube-api-access-m9f5s" (OuterVolumeSpecName: "kube-api-access-m9f5s") pod "a443dace-dc6a-4488-a0d1-183a1198bd0d" (UID: "a443dace-dc6a-4488-a0d1-183a1198bd0d"). InnerVolumeSpecName "kube-api-access-m9f5s". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.326758 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tfbzt" Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.329549 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-2clxj" Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.334450 4728 scope.go:117] "RemoveContainer" containerID="18f52d0dff878c463e957c16605c8c5cbb54d7a5515df1dc5f87b7d7b5e8589f" Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.336861 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-m9cnn" Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.355831 4728 scope.go:117] "RemoveContainer" containerID="89fe1afe709040bdb3354809a7e708dd765847d79bb13fa93ad41dee5e743088" Dec 05 11:14:05 crc kubenswrapper[4728]: E1205 11:14:05.356308 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"89fe1afe709040bdb3354809a7e708dd765847d79bb13fa93ad41dee5e743088\": container with ID starting with 89fe1afe709040bdb3354809a7e708dd765847d79bb13fa93ad41dee5e743088 not found: ID does not exist" containerID="89fe1afe709040bdb3354809a7e708dd765847d79bb13fa93ad41dee5e743088" Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.356350 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"89fe1afe709040bdb3354809a7e708dd765847d79bb13fa93ad41dee5e743088"} err="failed to get container status \"89fe1afe709040bdb3354809a7e708dd765847d79bb13fa93ad41dee5e743088\": rpc error: code = NotFound desc = could not find container \"89fe1afe709040bdb3354809a7e708dd765847d79bb13fa93ad41dee5e743088\": container with ID starting with 89fe1afe709040bdb3354809a7e708dd765847d79bb13fa93ad41dee5e743088 not found: ID does not exist" Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.356369 4728 scope.go:117] "RemoveContainer" containerID="e921cc3b4ddad9044853732c69baa3a26011871e0ccf81fac8469fa173ac39b7" Dec 05 11:14:05 crc kubenswrapper[4728]: E1205 11:14:05.356754 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e921cc3b4ddad9044853732c69baa3a26011871e0ccf81fac8469fa173ac39b7\": container with ID starting with e921cc3b4ddad9044853732c69baa3a26011871e0ccf81fac8469fa173ac39b7 not found: ID does not exist" containerID="e921cc3b4ddad9044853732c69baa3a26011871e0ccf81fac8469fa173ac39b7" Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.356775 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e921cc3b4ddad9044853732c69baa3a26011871e0ccf81fac8469fa173ac39b7"} err="failed to get container status \"e921cc3b4ddad9044853732c69baa3a26011871e0ccf81fac8469fa173ac39b7\": rpc error: code = NotFound desc = could not find container \"e921cc3b4ddad9044853732c69baa3a26011871e0ccf81fac8469fa173ac39b7\": container with ID starting with e921cc3b4ddad9044853732c69baa3a26011871e0ccf81fac8469fa173ac39b7 not found: ID does not exist" Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.356786 4728 scope.go:117] "RemoveContainer" containerID="18f52d0dff878c463e957c16605c8c5cbb54d7a5515df1dc5f87b7d7b5e8589f" Dec 05 11:14:05 crc kubenswrapper[4728]: E1205 11:14:05.357022 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"18f52d0dff878c463e957c16605c8c5cbb54d7a5515df1dc5f87b7d7b5e8589f\": container with ID starting with 18f52d0dff878c463e957c16605c8c5cbb54d7a5515df1dc5f87b7d7b5e8589f not found: ID does not exist" 
containerID="18f52d0dff878c463e957c16605c8c5cbb54d7a5515df1dc5f87b7d7b5e8589f" Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.357046 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"18f52d0dff878c463e957c16605c8c5cbb54d7a5515df1dc5f87b7d7b5e8589f"} err="failed to get container status \"18f52d0dff878c463e957c16605c8c5cbb54d7a5515df1dc5f87b7d7b5e8589f\": rpc error: code = NotFound desc = could not find container \"18f52d0dff878c463e957c16605c8c5cbb54d7a5515df1dc5f87b7d7b5e8589f\": container with ID starting with 18f52d0dff878c463e957c16605c8c5cbb54d7a5515df1dc5f87b7d7b5e8589f not found: ID does not exist" Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.388137 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a443dace-dc6a-4488-a0d1-183a1198bd0d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a443dace-dc6a-4488-a0d1-183a1198bd0d" (UID: "a443dace-dc6a-4488-a0d1-183a1198bd0d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.412948 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09c93128-1454-446f-bb75-771442084d74-catalog-content\") pod \"09c93128-1454-446f-bb75-771442084d74\" (UID: \"09c93128-1454-446f-bb75-771442084d74\") " Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.413019 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wv5vf\" (UniqueName: \"kubernetes.io/projected/09c93128-1454-446f-bb75-771442084d74-kube-api-access-wv5vf\") pod \"09c93128-1454-446f-bb75-771442084d74\" (UID: \"09c93128-1454-446f-bb75-771442084d74\") " Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.413097 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09c93128-1454-446f-bb75-771442084d74-utilities\") pod \"09c93128-1454-446f-bb75-771442084d74\" (UID: \"09c93128-1454-446f-bb75-771442084d74\") " Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.413377 4728 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a443dace-dc6a-4488-a0d1-183a1198bd0d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.413414 4728 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a443dace-dc6a-4488-a0d1-183a1198bd0d-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.413427 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m9f5s\" (UniqueName: \"kubernetes.io/projected/a443dace-dc6a-4488-a0d1-183a1198bd0d-kube-api-access-m9f5s\") on node \"crc\" DevicePath \"\"" Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.414059 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/09c93128-1454-446f-bb75-771442084d74-utilities" (OuterVolumeSpecName: "utilities") pod "09c93128-1454-446f-bb75-771442084d74" (UID: "09c93128-1454-446f-bb75-771442084d74"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.415748 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09c93128-1454-446f-bb75-771442084d74-kube-api-access-wv5vf" (OuterVolumeSpecName: "kube-api-access-wv5vf") pod "09c93128-1454-446f-bb75-771442084d74" (UID: "09c93128-1454-446f-bb75-771442084d74"). InnerVolumeSpecName "kube-api-access-wv5vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.431309 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/09c93128-1454-446f-bb75-771442084d74-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "09c93128-1454-446f-bb75-771442084d74" (UID: "09c93128-1454-446f-bb75-771442084d74"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.514847 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jrjkq\" (UniqueName: \"kubernetes.io/projected/8c8f3b0b-edad-4a06-8374-2d2d3cd805d8-kube-api-access-jrjkq\") pod \"8c8f3b0b-edad-4a06-8374-2d2d3cd805d8\" (UID: \"8c8f3b0b-edad-4a06-8374-2d2d3cd805d8\") " Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.514910 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29c0e6c0-78ea-4a37-aa57-22af46f50133-utilities\") pod \"29c0e6c0-78ea-4a37-aa57-22af46f50133\" (UID: \"29c0e6c0-78ea-4a37-aa57-22af46f50133\") " Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.514949 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29c0e6c0-78ea-4a37-aa57-22af46f50133-catalog-content\") pod \"29c0e6c0-78ea-4a37-aa57-22af46f50133\" (UID: \"29c0e6c0-78ea-4a37-aa57-22af46f50133\") " Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.514984 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48f4f7f9-a366-44ba-b8ee-f349da78fa76-utilities\") pod \"48f4f7f9-a366-44ba-b8ee-f349da78fa76\" (UID: \"48f4f7f9-a366-44ba-b8ee-f349da78fa76\") " Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.515009 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2cw9t\" (UniqueName: \"kubernetes.io/projected/48f4f7f9-a366-44ba-b8ee-f349da78fa76-kube-api-access-2cw9t\") pod \"48f4f7f9-a366-44ba-b8ee-f349da78fa76\" (UID: \"48f4f7f9-a366-44ba-b8ee-f349da78fa76\") " Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.515031 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8c8f3b0b-edad-4a06-8374-2d2d3cd805d8-marketplace-trusted-ca\") pod \"8c8f3b0b-edad-4a06-8374-2d2d3cd805d8\" (UID: \"8c8f3b0b-edad-4a06-8374-2d2d3cd805d8\") " Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.515058 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48f4f7f9-a366-44ba-b8ee-f349da78fa76-catalog-content\") pod \"48f4f7f9-a366-44ba-b8ee-f349da78fa76\" (UID: \"48f4f7f9-a366-44ba-b8ee-f349da78fa76\") " Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.515079 4728 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/8c8f3b0b-edad-4a06-8374-2d2d3cd805d8-marketplace-operator-metrics\") pod \"8c8f3b0b-edad-4a06-8374-2d2d3cd805d8\" (UID: \"8c8f3b0b-edad-4a06-8374-2d2d3cd805d8\") " Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.515148 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rv7z6\" (UniqueName: \"kubernetes.io/projected/29c0e6c0-78ea-4a37-aa57-22af46f50133-kube-api-access-rv7z6\") pod \"29c0e6c0-78ea-4a37-aa57-22af46f50133\" (UID: \"29c0e6c0-78ea-4a37-aa57-22af46f50133\") " Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.515833 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/29c0e6c0-78ea-4a37-aa57-22af46f50133-utilities" (OuterVolumeSpecName: "utilities") pod "29c0e6c0-78ea-4a37-aa57-22af46f50133" (UID: "29c0e6c0-78ea-4a37-aa57-22af46f50133"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.515844 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wv5vf\" (UniqueName: \"kubernetes.io/projected/09c93128-1454-446f-bb75-771442084d74-kube-api-access-wv5vf\") on node \"crc\" DevicePath \"\"" Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.515882 4728 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09c93128-1454-446f-bb75-771442084d74-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.515894 4728 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09c93128-1454-446f-bb75-771442084d74-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.516768 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/48f4f7f9-a366-44ba-b8ee-f349da78fa76-utilities" (OuterVolumeSpecName: "utilities") pod "48f4f7f9-a366-44ba-b8ee-f349da78fa76" (UID: "48f4f7f9-a366-44ba-b8ee-f349da78fa76"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.519091 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29c0e6c0-78ea-4a37-aa57-22af46f50133-kube-api-access-rv7z6" (OuterVolumeSpecName: "kube-api-access-rv7z6") pod "29c0e6c0-78ea-4a37-aa57-22af46f50133" (UID: "29c0e6c0-78ea-4a37-aa57-22af46f50133"). InnerVolumeSpecName "kube-api-access-rv7z6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.519553 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c8f3b0b-edad-4a06-8374-2d2d3cd805d8-kube-api-access-jrjkq" (OuterVolumeSpecName: "kube-api-access-jrjkq") pod "8c8f3b0b-edad-4a06-8374-2d2d3cd805d8" (UID: "8c8f3b0b-edad-4a06-8374-2d2d3cd805d8"). InnerVolumeSpecName "kube-api-access-jrjkq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.519570 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c8f3b0b-edad-4a06-8374-2d2d3cd805d8-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "8c8f3b0b-edad-4a06-8374-2d2d3cd805d8" (UID: "8c8f3b0b-edad-4a06-8374-2d2d3cd805d8"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.526978 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/48f4f7f9-a366-44ba-b8ee-f349da78fa76-kube-api-access-2cw9t" (OuterVolumeSpecName: "kube-api-access-2cw9t") pod "48f4f7f9-a366-44ba-b8ee-f349da78fa76" (UID: "48f4f7f9-a366-44ba-b8ee-f349da78fa76"). InnerVolumeSpecName "kube-api-access-2cw9t". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.531820 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8c8f3b0b-edad-4a06-8374-2d2d3cd805d8-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "8c8f3b0b-edad-4a06-8374-2d2d3cd805d8" (UID: "8c8f3b0b-edad-4a06-8374-2d2d3cd805d8"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.561907 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6zdxv"] Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.565896 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-6zdxv"] Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.570156 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-nxkrc"] Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.575991 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/48f4f7f9-a366-44ba-b8ee-f349da78fa76-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "48f4f7f9-a366-44ba-b8ee-f349da78fa76" (UID: "48f4f7f9-a366-44ba-b8ee-f349da78fa76"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.616931 4728 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8c8f3b0b-edad-4a06-8374-2d2d3cd805d8-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.616964 4728 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48f4f7f9-a366-44ba-b8ee-f349da78fa76-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.616976 4728 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/8c8f3b0b-edad-4a06-8374-2d2d3cd805d8-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.616988 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rv7z6\" (UniqueName: \"kubernetes.io/projected/29c0e6c0-78ea-4a37-aa57-22af46f50133-kube-api-access-rv7z6\") on node \"crc\" DevicePath \"\"" Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.616999 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jrjkq\" (UniqueName: \"kubernetes.io/projected/8c8f3b0b-edad-4a06-8374-2d2d3cd805d8-kube-api-access-jrjkq\") on node \"crc\" DevicePath \"\"" Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.617010 4728 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29c0e6c0-78ea-4a37-aa57-22af46f50133-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.617022 4728 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48f4f7f9-a366-44ba-b8ee-f349da78fa76-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.617032 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2cw9t\" (UniqueName: \"kubernetes.io/projected/48f4f7f9-a366-44ba-b8ee-f349da78fa76-kube-api-access-2cw9t\") on node \"crc\" DevicePath \"\"" Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.626347 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/29c0e6c0-78ea-4a37-aa57-22af46f50133-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "29c0e6c0-78ea-4a37-aa57-22af46f50133" (UID: "29c0e6c0-78ea-4a37-aa57-22af46f50133"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:14:05 crc kubenswrapper[4728]: I1205 11:14:05.719096 4728 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29c0e6c0-78ea-4a37-aa57-22af46f50133-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.229111 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-nxkrc" event={"ID":"416c984c-f4c2-4b3f-8dd6-c27724ac7c42","Type":"ContainerStarted","Data":"5cea0c0d67a2a168df2d2ef42633f1ba170cb5fcb1e4cf150931b5d4e810b985"} Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.229156 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-nxkrc" event={"ID":"416c984c-f4c2-4b3f-8dd6-c27724ac7c42","Type":"ContainerStarted","Data":"c19ebabaae3ab373133162072058c5553a5f124b04ccb8c49e973775ec642ebc"} Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.230473 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-nxkrc" Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.231859 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-2clxj" event={"ID":"8c8f3b0b-edad-4a06-8374-2d2d3cd805d8","Type":"ContainerDied","Data":"9e443aa647c8f076eef94a9880a5d577964ac3e64b1a8f8f257d40b958a35e91"} Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.231897 4728 scope.go:117] "RemoveContainer" containerID="8804388d269cf66039caca0903b822ad608b69cb6c7c3a54fd3ae6df8248bbed" Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.231916 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-2clxj" Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.235075 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-nxkrc" Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.238605 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-m9cnn" event={"ID":"48f4f7f9-a366-44ba-b8ee-f349da78fa76","Type":"ContainerDied","Data":"60e64ed66be09c9b1666cac08a30c4a79375a5e9183edd78e5462e8c08753389"} Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.239028 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-m9cnn" Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.241029 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tfbzt" event={"ID":"29c0e6c0-78ea-4a37-aa57-22af46f50133","Type":"ContainerDied","Data":"d85b6c1cda777acb7d234a054a3ebf04e2a683c1c62802ea1144c7f49528158c"} Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.241113 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-tfbzt" Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.243740 4728 scope.go:117] "RemoveContainer" containerID="b095182e2c599320984e84d99d1ea82a90c37eb1c4ae4bd4af432056e5be61c9" Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.249493 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mjf89" event={"ID":"09c93128-1454-446f-bb75-771442084d74","Type":"ContainerDied","Data":"4b00e1446dc77e862ec76757433e5d47267e5efe9adcedd82f2ff6ef6e9c0264"} Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.249609 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mjf89" Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.254526 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-nxkrc" podStartSLOduration=2.254507337 podStartE2EDuration="2.254507337s" podCreationTimestamp="2025-12-05 11:14:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:14:06.2527518 +0000 UTC m=+380.394874503" watchObservedRunningTime="2025-12-05 11:14:06.254507337 +0000 UTC m=+380.396630030" Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.262119 4728 scope.go:117] "RemoveContainer" containerID="b0362d028b7224e0d8e57e0c5b0d745888deb423e4b720cb72e9902f7a772256" Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.300997 4728 scope.go:117] "RemoveContainer" containerID="baeda4fa68eaf44b8e42427d43b46f08b26922fa7cbbe09f1c07352b953d8265" Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.302997 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-2clxj"] Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.308231 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-2clxj"] Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.313479 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-m9cnn"] Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.317574 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-m9cnn"] Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.327072 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mjf89"] Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.331588 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-mjf89"] Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.335758 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tfbzt"] Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.337361 4728 scope.go:117] "RemoveContainer" containerID="e77b1c412600e174eb403d781f0c0273cbbc7b072f51d8f573ced7354d5b724f" Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.339441 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-tfbzt"] Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.354712 4728 scope.go:117] "RemoveContainer" containerID="35397ac4885f44c63da90dc252050f9b1b8f37e6b4a675d98ed20fa3690ba67e" Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.358770 4728 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09c93128-1454-446f-bb75-771442084d74" path="/var/lib/kubelet/pods/09c93128-1454-446f-bb75-771442084d74/volumes" Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.359390 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="29c0e6c0-78ea-4a37-aa57-22af46f50133" path="/var/lib/kubelet/pods/29c0e6c0-78ea-4a37-aa57-22af46f50133/volumes" Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.359944 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="48f4f7f9-a366-44ba-b8ee-f349da78fa76" path="/var/lib/kubelet/pods/48f4f7f9-a366-44ba-b8ee-f349da78fa76/volumes" Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.360990 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8c8f3b0b-edad-4a06-8374-2d2d3cd805d8" path="/var/lib/kubelet/pods/8c8f3b0b-edad-4a06-8374-2d2d3cd805d8/volumes" Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.361418 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a443dace-dc6a-4488-a0d1-183a1198bd0d" path="/var/lib/kubelet/pods/a443dace-dc6a-4488-a0d1-183a1198bd0d/volumes" Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.372000 4728 scope.go:117] "RemoveContainer" containerID="ef5b720cd65b8cbfc8aea692fe775d80420c8f2f47ab25a370581a3f771043a4" Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.388107 4728 scope.go:117] "RemoveContainer" containerID="1ce3204e059fc718ca7e684da3b0529b22391a3c9958436f86ebb4f4f01d8cda" Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.401120 4728 scope.go:117] "RemoveContainer" containerID="b1832c94119ca4f8b4ebd72b2c33c0ba7f3dd2ae97f72fc912bc7a363c7dac8f" Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.412033 4728 scope.go:117] "RemoveContainer" containerID="d82e7148a1c3c0c0994c4f3c53aa47cb425343ae0cb4f9b0324fb28e2c81ad1c" Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.924915 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-4wpqx"] Dec 05 11:14:06 crc kubenswrapper[4728]: E1205 11:14:06.926028 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29c0e6c0-78ea-4a37-aa57-22af46f50133" containerName="extract-utilities" Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.926071 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="29c0e6c0-78ea-4a37-aa57-22af46f50133" containerName="extract-utilities" Dec 05 11:14:06 crc kubenswrapper[4728]: E1205 11:14:06.926085 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29c0e6c0-78ea-4a37-aa57-22af46f50133" containerName="registry-server" Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.926094 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="29c0e6c0-78ea-4a37-aa57-22af46f50133" containerName="registry-server" Dec 05 11:14:06 crc kubenswrapper[4728]: E1205 11:14:06.926198 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a443dace-dc6a-4488-a0d1-183a1198bd0d" containerName="extract-utilities" Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.926211 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="a443dace-dc6a-4488-a0d1-183a1198bd0d" containerName="extract-utilities" Dec 05 11:14:06 crc kubenswrapper[4728]: E1205 11:14:06.926223 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a443dace-dc6a-4488-a0d1-183a1198bd0d" containerName="registry-server" Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.926230 4728 
state_mem.go:107] "Deleted CPUSet assignment" podUID="a443dace-dc6a-4488-a0d1-183a1198bd0d" containerName="registry-server" Dec 05 11:14:06 crc kubenswrapper[4728]: E1205 11:14:06.926243 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09c93128-1454-446f-bb75-771442084d74" containerName="extract-content" Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.926250 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="09c93128-1454-446f-bb75-771442084d74" containerName="extract-content" Dec 05 11:14:06 crc kubenswrapper[4728]: E1205 11:14:06.926259 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48f4f7f9-a366-44ba-b8ee-f349da78fa76" containerName="extract-utilities" Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.926266 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="48f4f7f9-a366-44ba-b8ee-f349da78fa76" containerName="extract-utilities" Dec 05 11:14:06 crc kubenswrapper[4728]: E1205 11:14:06.926277 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09c93128-1454-446f-bb75-771442084d74" containerName="extract-utilities" Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.926286 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="09c93128-1454-446f-bb75-771442084d74" containerName="extract-utilities" Dec 05 11:14:06 crc kubenswrapper[4728]: E1205 11:14:06.926297 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09c93128-1454-446f-bb75-771442084d74" containerName="registry-server" Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.926305 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="09c93128-1454-446f-bb75-771442084d74" containerName="registry-server" Dec 05 11:14:06 crc kubenswrapper[4728]: E1205 11:14:06.926318 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48f4f7f9-a366-44ba-b8ee-f349da78fa76" containerName="registry-server" Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.926343 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="48f4f7f9-a366-44ba-b8ee-f349da78fa76" containerName="registry-server" Dec 05 11:14:06 crc kubenswrapper[4728]: E1205 11:14:06.926352 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29c0e6c0-78ea-4a37-aa57-22af46f50133" containerName="extract-content" Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.926360 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="29c0e6c0-78ea-4a37-aa57-22af46f50133" containerName="extract-content" Dec 05 11:14:06 crc kubenswrapper[4728]: E1205 11:14:06.926370 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c8f3b0b-edad-4a06-8374-2d2d3cd805d8" containerName="marketplace-operator" Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.926379 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c8f3b0b-edad-4a06-8374-2d2d3cd805d8" containerName="marketplace-operator" Dec 05 11:14:06 crc kubenswrapper[4728]: E1205 11:14:06.926386 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a443dace-dc6a-4488-a0d1-183a1198bd0d" containerName="extract-content" Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.926393 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="a443dace-dc6a-4488-a0d1-183a1198bd0d" containerName="extract-content" Dec 05 11:14:06 crc kubenswrapper[4728]: E1205 11:14:06.926404 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c8f3b0b-edad-4a06-8374-2d2d3cd805d8" containerName="marketplace-operator" Dec 05 11:14:06 crc 
kubenswrapper[4728]: I1205 11:14:06.926413 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c8f3b0b-edad-4a06-8374-2d2d3cd805d8" containerName="marketplace-operator" Dec 05 11:14:06 crc kubenswrapper[4728]: E1205 11:14:06.926423 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48f4f7f9-a366-44ba-b8ee-f349da78fa76" containerName="extract-content" Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.926431 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="48f4f7f9-a366-44ba-b8ee-f349da78fa76" containerName="extract-content" Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.926580 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="48f4f7f9-a366-44ba-b8ee-f349da78fa76" containerName="registry-server" Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.926616 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="09c93128-1454-446f-bb75-771442084d74" containerName="registry-server" Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.926624 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c8f3b0b-edad-4a06-8374-2d2d3cd805d8" containerName="marketplace-operator" Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.926641 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="29c0e6c0-78ea-4a37-aa57-22af46f50133" containerName="registry-server" Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.926651 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="a443dace-dc6a-4488-a0d1-183a1198bd0d" containerName="registry-server" Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.926878 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c8f3b0b-edad-4a06-8374-2d2d3cd805d8" containerName="marketplace-operator" Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.927728 4728 util.go:30] "No sandbox for pod can be found. 
Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.930003 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.931506 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4wpqx"]
Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.932418 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5jmjl\" (UniqueName: \"kubernetes.io/projected/3566a4b3-0657-4221-9536-cfebc1b59376-kube-api-access-5jmjl\") pod \"redhat-marketplace-4wpqx\" (UID: \"3566a4b3-0657-4221-9536-cfebc1b59376\") " pod="openshift-marketplace/redhat-marketplace-4wpqx"
Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.932484 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3566a4b3-0657-4221-9536-cfebc1b59376-utilities\") pod \"redhat-marketplace-4wpqx\" (UID: \"3566a4b3-0657-4221-9536-cfebc1b59376\") " pod="openshift-marketplace/redhat-marketplace-4wpqx"
Dec 05 11:14:06 crc kubenswrapper[4728]: I1205 11:14:06.932532 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3566a4b3-0657-4221-9536-cfebc1b59376-catalog-content\") pod \"redhat-marketplace-4wpqx\" (UID: \"3566a4b3-0657-4221-9536-cfebc1b59376\") " pod="openshift-marketplace/redhat-marketplace-4wpqx"
Dec 05 11:14:07 crc kubenswrapper[4728]: I1205 11:14:07.032833 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3566a4b3-0657-4221-9536-cfebc1b59376-catalog-content\") pod \"redhat-marketplace-4wpqx\" (UID: \"3566a4b3-0657-4221-9536-cfebc1b59376\") " pod="openshift-marketplace/redhat-marketplace-4wpqx"
Dec 05 11:14:07 crc kubenswrapper[4728]: I1205 11:14:07.032938 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5jmjl\" (UniqueName: \"kubernetes.io/projected/3566a4b3-0657-4221-9536-cfebc1b59376-kube-api-access-5jmjl\") pod \"redhat-marketplace-4wpqx\" (UID: \"3566a4b3-0657-4221-9536-cfebc1b59376\") " pod="openshift-marketplace/redhat-marketplace-4wpqx"
Dec 05 11:14:07 crc kubenswrapper[4728]: I1205 11:14:07.032967 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3566a4b3-0657-4221-9536-cfebc1b59376-utilities\") pod \"redhat-marketplace-4wpqx\" (UID: \"3566a4b3-0657-4221-9536-cfebc1b59376\") " pod="openshift-marketplace/redhat-marketplace-4wpqx"
Dec 05 11:14:07 crc kubenswrapper[4728]: I1205 11:14:07.033360 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3566a4b3-0657-4221-9536-cfebc1b59376-catalog-content\") pod \"redhat-marketplace-4wpqx\" (UID: \"3566a4b3-0657-4221-9536-cfebc1b59376\") " pod="openshift-marketplace/redhat-marketplace-4wpqx"
Dec 05 11:14:07 crc kubenswrapper[4728]: I1205 11:14:07.033598 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3566a4b3-0657-4221-9536-cfebc1b59376-utilities\") pod \"redhat-marketplace-4wpqx\" (UID: \"3566a4b3-0657-4221-9536-cfebc1b59376\") " pod="openshift-marketplace/redhat-marketplace-4wpqx"
Dec 05 11:14:07 crc kubenswrapper[4728]: I1205 11:14:07.052656 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5jmjl\" (UniqueName: \"kubernetes.io/projected/3566a4b3-0657-4221-9536-cfebc1b59376-kube-api-access-5jmjl\") pod \"redhat-marketplace-4wpqx\" (UID: \"3566a4b3-0657-4221-9536-cfebc1b59376\") " pod="openshift-marketplace/redhat-marketplace-4wpqx"
Dec 05 11:14:07 crc kubenswrapper[4728]: I1205 11:14:07.122660 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-hvhhm"]
Dec 05 11:14:07 crc kubenswrapper[4728]: I1205 11:14:07.124347 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hvhhm"
Dec 05 11:14:07 crc kubenswrapper[4728]: I1205 11:14:07.128087 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Dec 05 11:14:07 crc kubenswrapper[4728]: I1205 11:14:07.133831 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cv4tt\" (UniqueName: \"kubernetes.io/projected/bf6fb737-e78f-496c-9ae3-5067c4300f62-kube-api-access-cv4tt\") pod \"redhat-operators-hvhhm\" (UID: \"bf6fb737-e78f-496c-9ae3-5067c4300f62\") " pod="openshift-marketplace/redhat-operators-hvhhm"
Dec 05 11:14:07 crc kubenswrapper[4728]: I1205 11:14:07.133951 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bf6fb737-e78f-496c-9ae3-5067c4300f62-catalog-content\") pod \"redhat-operators-hvhhm\" (UID: \"bf6fb737-e78f-496c-9ae3-5067c4300f62\") " pod="openshift-marketplace/redhat-operators-hvhhm"
Dec 05 11:14:07 crc kubenswrapper[4728]: I1205 11:14:07.134045 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bf6fb737-e78f-496c-9ae3-5067c4300f62-utilities\") pod \"redhat-operators-hvhhm\" (UID: \"bf6fb737-e78f-496c-9ae3-5067c4300f62\") " pod="openshift-marketplace/redhat-operators-hvhhm"
Dec 05 11:14:07 crc kubenswrapper[4728]: I1205 11:14:07.135291 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hvhhm"]
Dec 05 11:14:07 crc kubenswrapper[4728]: I1205 11:14:07.235552 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bf6fb737-e78f-496c-9ae3-5067c4300f62-catalog-content\") pod \"redhat-operators-hvhhm\" (UID: \"bf6fb737-e78f-496c-9ae3-5067c4300f62\") " pod="openshift-marketplace/redhat-operators-hvhhm"
Dec 05 11:14:07 crc kubenswrapper[4728]: I1205 11:14:07.235671 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bf6fb737-e78f-496c-9ae3-5067c4300f62-utilities\") pod \"redhat-operators-hvhhm\" (UID: \"bf6fb737-e78f-496c-9ae3-5067c4300f62\") " pod="openshift-marketplace/redhat-operators-hvhhm"
Dec 05 11:14:07 crc kubenswrapper[4728]: I1205 11:14:07.235734 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cv4tt\" (UniqueName: \"kubernetes.io/projected/bf6fb737-e78f-496c-9ae3-5067c4300f62-kube-api-access-cv4tt\") pod \"redhat-operators-hvhhm\" (UID: \"bf6fb737-e78f-496c-9ae3-5067c4300f62\") " pod="openshift-marketplace/redhat-operators-hvhhm"
Dec 05 11:14:07 crc kubenswrapper[4728]: I1205 11:14:07.236044 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bf6fb737-e78f-496c-9ae3-5067c4300f62-catalog-content\") pod \"redhat-operators-hvhhm\" (UID: \"bf6fb737-e78f-496c-9ae3-5067c4300f62\") " pod="openshift-marketplace/redhat-operators-hvhhm"
Dec 05 11:14:07 crc kubenswrapper[4728]: I1205 11:14:07.236112 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bf6fb737-e78f-496c-9ae3-5067c4300f62-utilities\") pod \"redhat-operators-hvhhm\" (UID: \"bf6fb737-e78f-496c-9ae3-5067c4300f62\") " pod="openshift-marketplace/redhat-operators-hvhhm"
Dec 05 11:14:07 crc kubenswrapper[4728]: I1205 11:14:07.249880 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4wpqx"
Dec 05 11:14:07 crc kubenswrapper[4728]: I1205 11:14:07.255098 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cv4tt\" (UniqueName: \"kubernetes.io/projected/bf6fb737-e78f-496c-9ae3-5067c4300f62-kube-api-access-cv4tt\") pod \"redhat-operators-hvhhm\" (UID: \"bf6fb737-e78f-496c-9ae3-5067c4300f62\") " pod="openshift-marketplace/redhat-operators-hvhhm"
Dec 05 11:14:07 crc kubenswrapper[4728]: I1205 11:14:07.455159 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hvhhm"
Dec 05 11:14:07 crc kubenswrapper[4728]: I1205 11:14:07.623909 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4wpqx"]
Dec 05 11:14:07 crc kubenswrapper[4728]: W1205 11:14:07.629188 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3566a4b3_0657_4221_9536_cfebc1b59376.slice/crio-9b4acb170e880e23133db45a152de46b27363611b5e6eb3dbcbdd7e6cad3fb05 WatchSource:0}: Error finding container 9b4acb170e880e23133db45a152de46b27363611b5e6eb3dbcbdd7e6cad3fb05: Status 404 returned error can't find the container with id 9b4acb170e880e23133db45a152de46b27363611b5e6eb3dbcbdd7e6cad3fb05
Dec 05 11:14:07 crc kubenswrapper[4728]: I1205 11:14:07.817967 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hvhhm"]
Dec 05 11:14:07 crc kubenswrapper[4728]: W1205 11:14:07.828092 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbf6fb737_e78f_496c_9ae3_5067c4300f62.slice/crio-96d6ebd82bab527e8e1ee79c71e283e58541bfc1daf4db846609d091a1d0d8b0 WatchSource:0}: Error finding container 96d6ebd82bab527e8e1ee79c71e283e58541bfc1daf4db846609d091a1d0d8b0: Status 404 returned error can't find the container with id 96d6ebd82bab527e8e1ee79c71e283e58541bfc1daf4db846609d091a1d0d8b0
Dec 05 11:14:08 crc kubenswrapper[4728]: I1205 11:14:08.277392 4728 generic.go:334] "Generic (PLEG): container finished" podID="3566a4b3-0657-4221-9536-cfebc1b59376" containerID="bb8e6c9de21d39fc39186d379975f58bece1c1c8a67fd7b3c06fdc600327175d" exitCode=0
Dec 05 11:14:08 crc kubenswrapper[4728]: I1205 11:14:08.277676 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4wpqx" event={"ID":"3566a4b3-0657-4221-9536-cfebc1b59376","Type":"ContainerDied","Data":"bb8e6c9de21d39fc39186d379975f58bece1c1c8a67fd7b3c06fdc600327175d"}
event={"ID":"3566a4b3-0657-4221-9536-cfebc1b59376","Type":"ContainerDied","Data":"bb8e6c9de21d39fc39186d379975f58bece1c1c8a67fd7b3c06fdc600327175d"} Dec 05 11:14:08 crc kubenswrapper[4728]: I1205 11:14:08.277701 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4wpqx" event={"ID":"3566a4b3-0657-4221-9536-cfebc1b59376","Type":"ContainerStarted","Data":"9b4acb170e880e23133db45a152de46b27363611b5e6eb3dbcbdd7e6cad3fb05"} Dec 05 11:14:08 crc kubenswrapper[4728]: I1205 11:14:08.279466 4728 generic.go:334] "Generic (PLEG): container finished" podID="bf6fb737-e78f-496c-9ae3-5067c4300f62" containerID="d2fce78802289e921ab4cd9861ed8928881d4544504b8f9428af53c10b60078d" exitCode=0 Dec 05 11:14:08 crc kubenswrapper[4728]: I1205 11:14:08.280609 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hvhhm" event={"ID":"bf6fb737-e78f-496c-9ae3-5067c4300f62","Type":"ContainerDied","Data":"d2fce78802289e921ab4cd9861ed8928881d4544504b8f9428af53c10b60078d"} Dec 05 11:14:08 crc kubenswrapper[4728]: I1205 11:14:08.280639 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hvhhm" event={"ID":"bf6fb737-e78f-496c-9ae3-5067c4300f62","Type":"ContainerStarted","Data":"96d6ebd82bab527e8e1ee79c71e283e58541bfc1daf4db846609d091a1d0d8b0"} Dec 05 11:14:09 crc kubenswrapper[4728]: I1205 11:14:09.331509 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-tvtdk"] Dec 05 11:14:09 crc kubenswrapper[4728]: I1205 11:14:09.334525 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tvtdk" Dec 05 11:14:09 crc kubenswrapper[4728]: I1205 11:14:09.338956 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 05 11:14:09 crc kubenswrapper[4728]: I1205 11:14:09.341365 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tvtdk"] Dec 05 11:14:09 crc kubenswrapper[4728]: I1205 11:14:09.372597 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad6f43a7-1af1-48d1-802f-a2c36bab80cd-utilities\") pod \"certified-operators-tvtdk\" (UID: \"ad6f43a7-1af1-48d1-802f-a2c36bab80cd\") " pod="openshift-marketplace/certified-operators-tvtdk" Dec 05 11:14:09 crc kubenswrapper[4728]: I1205 11:14:09.372663 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wh74v\" (UniqueName: \"kubernetes.io/projected/ad6f43a7-1af1-48d1-802f-a2c36bab80cd-kube-api-access-wh74v\") pod \"certified-operators-tvtdk\" (UID: \"ad6f43a7-1af1-48d1-802f-a2c36bab80cd\") " pod="openshift-marketplace/certified-operators-tvtdk" Dec 05 11:14:09 crc kubenswrapper[4728]: I1205 11:14:09.372686 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad6f43a7-1af1-48d1-802f-a2c36bab80cd-catalog-content\") pod \"certified-operators-tvtdk\" (UID: \"ad6f43a7-1af1-48d1-802f-a2c36bab80cd\") " pod="openshift-marketplace/certified-operators-tvtdk" Dec 05 11:14:09 crc kubenswrapper[4728]: I1205 11:14:09.474928 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/ad6f43a7-1af1-48d1-802f-a2c36bab80cd-utilities\") pod \"certified-operators-tvtdk\" (UID: \"ad6f43a7-1af1-48d1-802f-a2c36bab80cd\") " pod="openshift-marketplace/certified-operators-tvtdk" Dec 05 11:14:09 crc kubenswrapper[4728]: I1205 11:14:09.474986 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wh74v\" (UniqueName: \"kubernetes.io/projected/ad6f43a7-1af1-48d1-802f-a2c36bab80cd-kube-api-access-wh74v\") pod \"certified-operators-tvtdk\" (UID: \"ad6f43a7-1af1-48d1-802f-a2c36bab80cd\") " pod="openshift-marketplace/certified-operators-tvtdk" Dec 05 11:14:09 crc kubenswrapper[4728]: I1205 11:14:09.475015 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad6f43a7-1af1-48d1-802f-a2c36bab80cd-catalog-content\") pod \"certified-operators-tvtdk\" (UID: \"ad6f43a7-1af1-48d1-802f-a2c36bab80cd\") " pod="openshift-marketplace/certified-operators-tvtdk" Dec 05 11:14:09 crc kubenswrapper[4728]: I1205 11:14:09.475491 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad6f43a7-1af1-48d1-802f-a2c36bab80cd-catalog-content\") pod \"certified-operators-tvtdk\" (UID: \"ad6f43a7-1af1-48d1-802f-a2c36bab80cd\") " pod="openshift-marketplace/certified-operators-tvtdk" Dec 05 11:14:09 crc kubenswrapper[4728]: I1205 11:14:09.475629 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad6f43a7-1af1-48d1-802f-a2c36bab80cd-utilities\") pod \"certified-operators-tvtdk\" (UID: \"ad6f43a7-1af1-48d1-802f-a2c36bab80cd\") " pod="openshift-marketplace/certified-operators-tvtdk" Dec 05 11:14:09 crc kubenswrapper[4728]: I1205 11:14:09.495463 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wh74v\" (UniqueName: \"kubernetes.io/projected/ad6f43a7-1af1-48d1-802f-a2c36bab80cd-kube-api-access-wh74v\") pod \"certified-operators-tvtdk\" (UID: \"ad6f43a7-1af1-48d1-802f-a2c36bab80cd\") " pod="openshift-marketplace/certified-operators-tvtdk" Dec 05 11:14:09 crc kubenswrapper[4728]: I1205 11:14:09.529265 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-s5vrv"] Dec 05 11:14:09 crc kubenswrapper[4728]: I1205 11:14:09.530507 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-s5vrv" Dec 05 11:14:09 crc kubenswrapper[4728]: I1205 11:14:09.533374 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 05 11:14:09 crc kubenswrapper[4728]: I1205 11:14:09.542822 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-s5vrv"] Dec 05 11:14:09 crc kubenswrapper[4728]: I1205 11:14:09.576206 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f5e6b28c-ad1c-4ae0-a757-ab7cd65b94e7-utilities\") pod \"community-operators-s5vrv\" (UID: \"f5e6b28c-ad1c-4ae0-a757-ab7cd65b94e7\") " pod="openshift-marketplace/community-operators-s5vrv" Dec 05 11:14:09 crc kubenswrapper[4728]: I1205 11:14:09.576280 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f5e6b28c-ad1c-4ae0-a757-ab7cd65b94e7-catalog-content\") pod \"community-operators-s5vrv\" (UID: \"f5e6b28c-ad1c-4ae0-a757-ab7cd65b94e7\") " pod="openshift-marketplace/community-operators-s5vrv" Dec 05 11:14:09 crc kubenswrapper[4728]: I1205 11:14:09.576323 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hmwnl\" (UniqueName: \"kubernetes.io/projected/f5e6b28c-ad1c-4ae0-a757-ab7cd65b94e7-kube-api-access-hmwnl\") pod \"community-operators-s5vrv\" (UID: \"f5e6b28c-ad1c-4ae0-a757-ab7cd65b94e7\") " pod="openshift-marketplace/community-operators-s5vrv" Dec 05 11:14:09 crc kubenswrapper[4728]: I1205 11:14:09.677775 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f5e6b28c-ad1c-4ae0-a757-ab7cd65b94e7-utilities\") pod \"community-operators-s5vrv\" (UID: \"f5e6b28c-ad1c-4ae0-a757-ab7cd65b94e7\") " pod="openshift-marketplace/community-operators-s5vrv" Dec 05 11:14:09 crc kubenswrapper[4728]: I1205 11:14:09.677840 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f5e6b28c-ad1c-4ae0-a757-ab7cd65b94e7-catalog-content\") pod \"community-operators-s5vrv\" (UID: \"f5e6b28c-ad1c-4ae0-a757-ab7cd65b94e7\") " pod="openshift-marketplace/community-operators-s5vrv" Dec 05 11:14:09 crc kubenswrapper[4728]: I1205 11:14:09.677867 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hmwnl\" (UniqueName: \"kubernetes.io/projected/f5e6b28c-ad1c-4ae0-a757-ab7cd65b94e7-kube-api-access-hmwnl\") pod \"community-operators-s5vrv\" (UID: \"f5e6b28c-ad1c-4ae0-a757-ab7cd65b94e7\") " pod="openshift-marketplace/community-operators-s5vrv" Dec 05 11:14:09 crc kubenswrapper[4728]: I1205 11:14:09.678264 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f5e6b28c-ad1c-4ae0-a757-ab7cd65b94e7-utilities\") pod \"community-operators-s5vrv\" (UID: \"f5e6b28c-ad1c-4ae0-a757-ab7cd65b94e7\") " pod="openshift-marketplace/community-operators-s5vrv" Dec 05 11:14:09 crc kubenswrapper[4728]: I1205 11:14:09.678445 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f5e6b28c-ad1c-4ae0-a757-ab7cd65b94e7-catalog-content\") pod \"community-operators-s5vrv\" (UID: 
\"f5e6b28c-ad1c-4ae0-a757-ab7cd65b94e7\") " pod="openshift-marketplace/community-operators-s5vrv" Dec 05 11:14:09 crc kubenswrapper[4728]: I1205 11:14:09.690037 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tvtdk" Dec 05 11:14:09 crc kubenswrapper[4728]: I1205 11:14:09.695479 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hmwnl\" (UniqueName: \"kubernetes.io/projected/f5e6b28c-ad1c-4ae0-a757-ab7cd65b94e7-kube-api-access-hmwnl\") pod \"community-operators-s5vrv\" (UID: \"f5e6b28c-ad1c-4ae0-a757-ab7cd65b94e7\") " pod="openshift-marketplace/community-operators-s5vrv" Dec 05 11:14:09 crc kubenswrapper[4728]: I1205 11:14:09.849111 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-s5vrv" Dec 05 11:14:09 crc kubenswrapper[4728]: I1205 11:14:09.912963 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tvtdk"] Dec 05 11:14:10 crc kubenswrapper[4728]: I1205 11:14:10.244386 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-s5vrv"] Dec 05 11:14:10 crc kubenswrapper[4728]: W1205 11:14:10.253527 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf5e6b28c_ad1c_4ae0_a757_ab7cd65b94e7.slice/crio-7103a8c71ade12b03eae0b70c566b65c6e1ece66f0af5368f3f44527a7857c26 WatchSource:0}: Error finding container 7103a8c71ade12b03eae0b70c566b65c6e1ece66f0af5368f3f44527a7857c26: Status 404 returned error can't find the container with id 7103a8c71ade12b03eae0b70c566b65c6e1ece66f0af5368f3f44527a7857c26 Dec 05 11:14:10 crc kubenswrapper[4728]: I1205 11:14:10.297167 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s5vrv" event={"ID":"f5e6b28c-ad1c-4ae0-a757-ab7cd65b94e7","Type":"ContainerStarted","Data":"7103a8c71ade12b03eae0b70c566b65c6e1ece66f0af5368f3f44527a7857c26"} Dec 05 11:14:10 crc kubenswrapper[4728]: I1205 11:14:10.301241 4728 generic.go:334] "Generic (PLEG): container finished" podID="3566a4b3-0657-4221-9536-cfebc1b59376" containerID="ffa3e9fa28a1594b3e7c3cf60abf37d1bddbd19b06abbe6269dbcee71500d7d9" exitCode=0 Dec 05 11:14:10 crc kubenswrapper[4728]: I1205 11:14:10.301305 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4wpqx" event={"ID":"3566a4b3-0657-4221-9536-cfebc1b59376","Type":"ContainerDied","Data":"ffa3e9fa28a1594b3e7c3cf60abf37d1bddbd19b06abbe6269dbcee71500d7d9"} Dec 05 11:14:10 crc kubenswrapper[4728]: I1205 11:14:10.309623 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hvhhm" event={"ID":"bf6fb737-e78f-496c-9ae3-5067c4300f62","Type":"ContainerStarted","Data":"17dded862bcd57c213996b79295694da12ce9b074c7cdcbca1b84033ba06537b"} Dec 05 11:14:10 crc kubenswrapper[4728]: I1205 11:14:10.318406 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tvtdk" event={"ID":"ad6f43a7-1af1-48d1-802f-a2c36bab80cd","Type":"ContainerStarted","Data":"d078c46e0345f16663000987e17927cbc961f68a53db6288b58af537399ed709"} Dec 05 11:14:11 crc kubenswrapper[4728]: I1205 11:14:11.325741 4728 generic.go:334] "Generic (PLEG): container finished" podID="bf6fb737-e78f-496c-9ae3-5067c4300f62" 
containerID="17dded862bcd57c213996b79295694da12ce9b074c7cdcbca1b84033ba06537b" exitCode=0 Dec 05 11:14:11 crc kubenswrapper[4728]: I1205 11:14:11.325839 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hvhhm" event={"ID":"bf6fb737-e78f-496c-9ae3-5067c4300f62","Type":"ContainerDied","Data":"17dded862bcd57c213996b79295694da12ce9b074c7cdcbca1b84033ba06537b"} Dec 05 11:14:11 crc kubenswrapper[4728]: I1205 11:14:11.327939 4728 generic.go:334] "Generic (PLEG): container finished" podID="ad6f43a7-1af1-48d1-802f-a2c36bab80cd" containerID="362826e660a68aae7a6a5a4a4c20406e1097b77093595814c933f65f67da7914" exitCode=0 Dec 05 11:14:11 crc kubenswrapper[4728]: I1205 11:14:11.328881 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tvtdk" event={"ID":"ad6f43a7-1af1-48d1-802f-a2c36bab80cd","Type":"ContainerDied","Data":"362826e660a68aae7a6a5a4a4c20406e1097b77093595814c933f65f67da7914"} Dec 05 11:14:11 crc kubenswrapper[4728]: I1205 11:14:11.330418 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s5vrv" event={"ID":"f5e6b28c-ad1c-4ae0-a757-ab7cd65b94e7","Type":"ContainerStarted","Data":"76b687c0d02db17d20552deb58bf1d761aedd6e9be9a6485b0aa5ad018c2ef14"} Dec 05 11:14:12 crc kubenswrapper[4728]: I1205 11:14:12.337164 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4wpqx" event={"ID":"3566a4b3-0657-4221-9536-cfebc1b59376","Type":"ContainerStarted","Data":"2208171bdd56cd8ac4c91d7c590ab88c814d7c9f8b714262de119cac270a1923"} Dec 05 11:14:12 crc kubenswrapper[4728]: I1205 11:14:12.339218 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hvhhm" event={"ID":"bf6fb737-e78f-496c-9ae3-5067c4300f62","Type":"ContainerStarted","Data":"006c2c62033c958ec6d6f80002e9637092ec479c726187cde1ab1f5322ee10ad"} Dec 05 11:14:12 crc kubenswrapper[4728]: I1205 11:14:12.340928 4728 generic.go:334] "Generic (PLEG): container finished" podID="ad6f43a7-1af1-48d1-802f-a2c36bab80cd" containerID="ca34da4d7a9d8ddc71ccd2ef6bebaa10257cd595f85e123715588eddae163573" exitCode=0 Dec 05 11:14:12 crc kubenswrapper[4728]: I1205 11:14:12.341012 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tvtdk" event={"ID":"ad6f43a7-1af1-48d1-802f-a2c36bab80cd","Type":"ContainerDied","Data":"ca34da4d7a9d8ddc71ccd2ef6bebaa10257cd595f85e123715588eddae163573"} Dec 05 11:14:12 crc kubenswrapper[4728]: I1205 11:14:12.342347 4728 generic.go:334] "Generic (PLEG): container finished" podID="f5e6b28c-ad1c-4ae0-a757-ab7cd65b94e7" containerID="76b687c0d02db17d20552deb58bf1d761aedd6e9be9a6485b0aa5ad018c2ef14" exitCode=0 Dec 05 11:14:12 crc kubenswrapper[4728]: I1205 11:14:12.342400 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s5vrv" event={"ID":"f5e6b28c-ad1c-4ae0-a757-ab7cd65b94e7","Type":"ContainerDied","Data":"76b687c0d02db17d20552deb58bf1d761aedd6e9be9a6485b0aa5ad018c2ef14"} Dec 05 11:14:12 crc kubenswrapper[4728]: I1205 11:14:12.361246 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-4wpqx" podStartSLOduration=3.192488809 podStartE2EDuration="6.36122619s" podCreationTimestamp="2025-12-05 11:14:06 +0000 UTC" firstStartedPulling="2025-12-05 11:14:08.280712655 +0000 UTC m=+382.422835348" lastFinishedPulling="2025-12-05 11:14:11.449450036 +0000 UTC 
m=+385.591572729" observedRunningTime="2025-12-05 11:14:12.36084295 +0000 UTC m=+386.502965653" watchObservedRunningTime="2025-12-05 11:14:12.36122619 +0000 UTC m=+386.503348883" Dec 05 11:14:12 crc kubenswrapper[4728]: I1205 11:14:12.404911 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-hvhhm" podStartSLOduration=1.835266496 podStartE2EDuration="5.404893489s" podCreationTimestamp="2025-12-05 11:14:07 +0000 UTC" firstStartedPulling="2025-12-05 11:14:08.281926298 +0000 UTC m=+382.424048991" lastFinishedPulling="2025-12-05 11:14:11.851553291 +0000 UTC m=+385.993675984" observedRunningTime="2025-12-05 11:14:12.404850778 +0000 UTC m=+386.546973471" watchObservedRunningTime="2025-12-05 11:14:12.404893489 +0000 UTC m=+386.547016182" Dec 05 11:14:13 crc kubenswrapper[4728]: I1205 11:14:13.350016 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tvtdk" event={"ID":"ad6f43a7-1af1-48d1-802f-a2c36bab80cd","Type":"ContainerStarted","Data":"d1d1d40ef2212f4e544bc4eaa6f0ff88f6bcd8985f6faa3ee60ea68776446d16"} Dec 05 11:14:13 crc kubenswrapper[4728]: I1205 11:14:13.352121 4728 generic.go:334] "Generic (PLEG): container finished" podID="f5e6b28c-ad1c-4ae0-a757-ab7cd65b94e7" containerID="ef1b0b581f6bb4d8f1b6614ef0c41ee8e3f060847cf5b40f82e65b12dea13311" exitCode=0 Dec 05 11:14:13 crc kubenswrapper[4728]: I1205 11:14:13.352906 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s5vrv" event={"ID":"f5e6b28c-ad1c-4ae0-a757-ab7cd65b94e7","Type":"ContainerDied","Data":"ef1b0b581f6bb4d8f1b6614ef0c41ee8e3f060847cf5b40f82e65b12dea13311"} Dec 05 11:14:13 crc kubenswrapper[4728]: I1205 11:14:13.369551 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-tvtdk" podStartSLOduration=3.073954366 podStartE2EDuration="4.3695318s" podCreationTimestamp="2025-12-05 11:14:09 +0000 UTC" firstStartedPulling="2025-12-05 11:14:11.445239683 +0000 UTC m=+385.587362386" lastFinishedPulling="2025-12-05 11:14:12.740817127 +0000 UTC m=+386.882939820" observedRunningTime="2025-12-05 11:14:13.365929763 +0000 UTC m=+387.508052466" watchObservedRunningTime="2025-12-05 11:14:13.3695318 +0000 UTC m=+387.511654493" Dec 05 11:14:15 crc kubenswrapper[4728]: I1205 11:14:15.365515 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s5vrv" event={"ID":"f5e6b28c-ad1c-4ae0-a757-ab7cd65b94e7","Type":"ContainerStarted","Data":"1f5b9a9e1af53c45d063b9295d90a9dfa0f5b1b27ed515d74cc768ae05bd5e25"} Dec 05 11:14:15 crc kubenswrapper[4728]: I1205 11:14:15.385698 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-s5vrv" podStartSLOduration=4.95781725 podStartE2EDuration="6.385677696s" podCreationTimestamp="2025-12-05 11:14:09 +0000 UTC" firstStartedPulling="2025-12-05 11:14:12.343515132 +0000 UTC m=+386.485637825" lastFinishedPulling="2025-12-05 11:14:13.771375578 +0000 UTC m=+387.913498271" observedRunningTime="2025-12-05 11:14:15.382238113 +0000 UTC m=+389.524360826" watchObservedRunningTime="2025-12-05 11:14:15.385677696 +0000 UTC m=+389.527800389" Dec 05 11:14:17 crc kubenswrapper[4728]: I1205 11:14:17.250540 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-4wpqx" Dec 05 11:14:17 crc kubenswrapper[4728]: I1205 11:14:17.250601 4728 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-4wpqx" Dec 05 11:14:17 crc kubenswrapper[4728]: I1205 11:14:17.288372 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-4wpqx" Dec 05 11:14:17 crc kubenswrapper[4728]: I1205 11:14:17.413260 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-4wpqx" Dec 05 11:14:17 crc kubenswrapper[4728]: I1205 11:14:17.456336 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-hvhhm" Dec 05 11:14:17 crc kubenswrapper[4728]: I1205 11:14:17.456840 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-hvhhm" Dec 05 11:14:17 crc kubenswrapper[4728]: I1205 11:14:17.493544 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-hvhhm" Dec 05 11:14:18 crc kubenswrapper[4728]: I1205 11:14:18.426485 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-hvhhm" Dec 05 11:14:19 crc kubenswrapper[4728]: I1205 11:14:19.690977 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-tvtdk" Dec 05 11:14:19 crc kubenswrapper[4728]: I1205 11:14:19.691271 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-tvtdk" Dec 05 11:14:19 crc kubenswrapper[4728]: I1205 11:14:19.731011 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-tvtdk" Dec 05 11:14:19 crc kubenswrapper[4728]: I1205 11:14:19.850414 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-s5vrv" Dec 05 11:14:19 crc kubenswrapper[4728]: I1205 11:14:19.850476 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-s5vrv" Dec 05 11:14:19 crc kubenswrapper[4728]: I1205 11:14:19.886286 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-s5vrv" Dec 05 11:14:20 crc kubenswrapper[4728]: I1205 11:14:20.432276 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-s5vrv" Dec 05 11:14:20 crc kubenswrapper[4728]: I1205 11:14:20.433191 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-tvtdk" Dec 05 11:14:25 crc kubenswrapper[4728]: I1205 11:14:25.702601 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 11:14:25 crc kubenswrapper[4728]: I1205 11:14:25.702999 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 11:14:55 crc 
kubenswrapper[4728]: I1205 11:14:55.701888 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 11:14:55 crc kubenswrapper[4728]: I1205 11:14:55.702446 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 11:14:55 crc kubenswrapper[4728]: I1205 11:14:55.702498 4728 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" Dec 05 11:14:55 crc kubenswrapper[4728]: I1205 11:14:55.703185 4728 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e95ef4252beb34711bfa41f21abadded9db33cd1c9b400ad26f4ed3d1cabb0bf"} pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 11:14:55 crc kubenswrapper[4728]: I1205 11:14:55.703252 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" containerID="cri-o://e95ef4252beb34711bfa41f21abadded9db33cd1c9b400ad26f4ed3d1cabb0bf" gracePeriod=600 Dec 05 11:14:56 crc kubenswrapper[4728]: I1205 11:14:56.671109 4728 generic.go:334] "Generic (PLEG): container finished" podID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerID="e95ef4252beb34711bfa41f21abadded9db33cd1c9b400ad26f4ed3d1cabb0bf" exitCode=0 Dec 05 11:14:56 crc kubenswrapper[4728]: I1205 11:14:56.671405 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" event={"ID":"95bfa60b-fcb6-4519-abc5-c25fea50921d","Type":"ContainerDied","Data":"e95ef4252beb34711bfa41f21abadded9db33cd1c9b400ad26f4ed3d1cabb0bf"} Dec 05 11:14:56 crc kubenswrapper[4728]: I1205 11:14:56.671432 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" event={"ID":"95bfa60b-fcb6-4519-abc5-c25fea50921d","Type":"ContainerStarted","Data":"e2f21c6c45d9f27ccde3dfcb5a8280ecb9fd8456606573b9fb4860fd32895139"} Dec 05 11:14:56 crc kubenswrapper[4728]: I1205 11:14:56.671448 4728 scope.go:117] "RemoveContainer" containerID="d41cccfccd3b8763566f9a4453832461a74aedcf7bcc18e8c925bb20e1b620de" Dec 05 11:15:00 crc kubenswrapper[4728]: I1205 11:15:00.178507 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415555-lpg6c"] Dec 05 11:15:00 crc kubenswrapper[4728]: I1205 11:15:00.192956 4728 util.go:30] "No sandbox for pod can be found. 
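[editor's note] The probe records above show the pattern behind the machine-config-daemon restarts: the liveness probe GETs http://127.0.0.1:8798/health, a refused connection is recorded as a failure, and after repeated failures the kubelet kills the container (gracePeriod=600) and starts a replacement. A minimal Go sketch of such an HTTP check (illustrative only, not kubelet's prober; the 1s timeout is an assumption, while the URL and failure text come from the log):

```go
package main

import (
	"fmt"
	"net/http"
	"time"
)

// probe performs one HTTP liveness-style check. Kubelet-style semantics:
// any transport error or status outside 200..399 counts as a failure.
func probe(url string) error {
	client := &http.Client{Timeout: 1 * time.Second} // assumed timeout
	resp, err := client.Get(url)
	if err != nil {
		// e.g. "dial tcp 127.0.0.1:8798: connect: connection refused", as logged above
		return err
	}
	defer resp.Body.Close()
	if resp.StatusCode < 200 || resp.StatusCode >= 400 {
		return fmt.Errorf("unexpected status %d", resp.StatusCode)
	}
	return nil
}

func main() {
	if err := probe("http://127.0.0.1:8798/health"); err != nil {
		fmt.Println("Probe failed:", err)
	} else {
		fmt.Println("Probe succeeded")
	}
}
```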
Dec 05 11:15:00 crc kubenswrapper[4728]: I1205 11:15:00.203522 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Dec 05 11:15:00 crc kubenswrapper[4728]: I1205 11:15:00.203543 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Dec 05 11:15:00 crc kubenswrapper[4728]: I1205 11:15:00.223114 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415555-lpg6c"]
Dec 05 11:15:00 crc kubenswrapper[4728]: I1205 11:15:00.306710 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2abbdcf9-ccd2-4ad9-8ada-590ae997d61a-config-volume\") pod \"collect-profiles-29415555-lpg6c\" (UID: \"2abbdcf9-ccd2-4ad9-8ada-590ae997d61a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415555-lpg6c"
Dec 05 11:15:00 crc kubenswrapper[4728]: I1205 11:15:00.306765 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z7jrv\" (UniqueName: \"kubernetes.io/projected/2abbdcf9-ccd2-4ad9-8ada-590ae997d61a-kube-api-access-z7jrv\") pod \"collect-profiles-29415555-lpg6c\" (UID: \"2abbdcf9-ccd2-4ad9-8ada-590ae997d61a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415555-lpg6c"
Dec 05 11:15:00 crc kubenswrapper[4728]: I1205 11:15:00.306783 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2abbdcf9-ccd2-4ad9-8ada-590ae997d61a-secret-volume\") pod \"collect-profiles-29415555-lpg6c\" (UID: \"2abbdcf9-ccd2-4ad9-8ada-590ae997d61a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415555-lpg6c"
Dec 05 11:15:00 crc kubenswrapper[4728]: I1205 11:15:00.408043 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2abbdcf9-ccd2-4ad9-8ada-590ae997d61a-config-volume\") pod \"collect-profiles-29415555-lpg6c\" (UID: \"2abbdcf9-ccd2-4ad9-8ada-590ae997d61a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415555-lpg6c"
Dec 05 11:15:00 crc kubenswrapper[4728]: I1205 11:15:00.408099 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z7jrv\" (UniqueName: \"kubernetes.io/projected/2abbdcf9-ccd2-4ad9-8ada-590ae997d61a-kube-api-access-z7jrv\") pod \"collect-profiles-29415555-lpg6c\" (UID: \"2abbdcf9-ccd2-4ad9-8ada-590ae997d61a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415555-lpg6c"
Dec 05 11:15:00 crc kubenswrapper[4728]: I1205 11:15:00.408127 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2abbdcf9-ccd2-4ad9-8ada-590ae997d61a-secret-volume\") pod \"collect-profiles-29415555-lpg6c\" (UID: \"2abbdcf9-ccd2-4ad9-8ada-590ae997d61a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415555-lpg6c"
Dec 05 11:15:00 crc kubenswrapper[4728]: I1205 11:15:00.409616 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2abbdcf9-ccd2-4ad9-8ada-590ae997d61a-config-volume\") pod \"collect-profiles-29415555-lpg6c\" (UID: \"2abbdcf9-ccd2-4ad9-8ada-590ae997d61a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415555-lpg6c"
Dec 05 11:15:00 crc kubenswrapper[4728]: I1205 11:15:00.414297 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2abbdcf9-ccd2-4ad9-8ada-590ae997d61a-secret-volume\") pod \"collect-profiles-29415555-lpg6c\" (UID: \"2abbdcf9-ccd2-4ad9-8ada-590ae997d61a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415555-lpg6c"
Dec 05 11:15:00 crc kubenswrapper[4728]: I1205 11:15:00.427432 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z7jrv\" (UniqueName: \"kubernetes.io/projected/2abbdcf9-ccd2-4ad9-8ada-590ae997d61a-kube-api-access-z7jrv\") pod \"collect-profiles-29415555-lpg6c\" (UID: \"2abbdcf9-ccd2-4ad9-8ada-590ae997d61a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415555-lpg6c"
Dec 05 11:15:00 crc kubenswrapper[4728]: I1205 11:15:00.530728 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415555-lpg6c"
Dec 05 11:15:00 crc kubenswrapper[4728]: I1205 11:15:00.921162 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415555-lpg6c"]
Dec 05 11:15:01 crc kubenswrapper[4728]: I1205 11:15:01.702454 4728 generic.go:334] "Generic (PLEG): container finished" podID="2abbdcf9-ccd2-4ad9-8ada-590ae997d61a" containerID="cfeb963bbf360c6a877e547c2446de6ad1ea69e30230bd56cf89b519551f3845" exitCode=0
Dec 05 11:15:01 crc kubenswrapper[4728]: I1205 11:15:01.702504 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415555-lpg6c" event={"ID":"2abbdcf9-ccd2-4ad9-8ada-590ae997d61a","Type":"ContainerDied","Data":"cfeb963bbf360c6a877e547c2446de6ad1ea69e30230bd56cf89b519551f3845"}
Dec 05 11:15:01 crc kubenswrapper[4728]: I1205 11:15:01.702814 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415555-lpg6c" event={"ID":"2abbdcf9-ccd2-4ad9-8ada-590ae997d61a","Type":"ContainerStarted","Data":"3e09ac93c3307f6745553ed45e24bb58e37f2a4d0111fc94a800fb020ddf67ae"}
Dec 05 11:15:02 crc kubenswrapper[4728]: I1205 11:15:02.979863 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415555-lpg6c"
Dec 05 11:15:03 crc kubenswrapper[4728]: I1205 11:15:03.143188 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2abbdcf9-ccd2-4ad9-8ada-590ae997d61a-secret-volume\") pod \"2abbdcf9-ccd2-4ad9-8ada-590ae997d61a\" (UID: \"2abbdcf9-ccd2-4ad9-8ada-590ae997d61a\") "
Dec 05 11:15:03 crc kubenswrapper[4728]: I1205 11:15:03.143250 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2abbdcf9-ccd2-4ad9-8ada-590ae997d61a-config-volume\") pod \"2abbdcf9-ccd2-4ad9-8ada-590ae997d61a\" (UID: \"2abbdcf9-ccd2-4ad9-8ada-590ae997d61a\") "
Dec 05 11:15:03 crc kubenswrapper[4728]: I1205 11:15:03.143276 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z7jrv\" (UniqueName: \"kubernetes.io/projected/2abbdcf9-ccd2-4ad9-8ada-590ae997d61a-kube-api-access-z7jrv\") pod \"2abbdcf9-ccd2-4ad9-8ada-590ae997d61a\" (UID: \"2abbdcf9-ccd2-4ad9-8ada-590ae997d61a\") "
Dec 05 11:15:03 crc kubenswrapper[4728]: I1205 11:15:03.144145 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2abbdcf9-ccd2-4ad9-8ada-590ae997d61a-config-volume" (OuterVolumeSpecName: "config-volume") pod "2abbdcf9-ccd2-4ad9-8ada-590ae997d61a" (UID: "2abbdcf9-ccd2-4ad9-8ada-590ae997d61a"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:15:03 crc kubenswrapper[4728]: I1205 11:15:03.156572 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2abbdcf9-ccd2-4ad9-8ada-590ae997d61a-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "2abbdcf9-ccd2-4ad9-8ada-590ae997d61a" (UID: "2abbdcf9-ccd2-4ad9-8ada-590ae997d61a"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:15:03 crc kubenswrapper[4728]: I1205 11:15:03.156666 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2abbdcf9-ccd2-4ad9-8ada-590ae997d61a-kube-api-access-z7jrv" (OuterVolumeSpecName: "kube-api-access-z7jrv") pod "2abbdcf9-ccd2-4ad9-8ada-590ae997d61a" (UID: "2abbdcf9-ccd2-4ad9-8ada-590ae997d61a"). InnerVolumeSpecName "kube-api-access-z7jrv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:15:03 crc kubenswrapper[4728]: I1205 11:15:03.244279 4728 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2abbdcf9-ccd2-4ad9-8ada-590ae997d61a-config-volume\") on node \"crc\" DevicePath \"\""
Dec 05 11:15:03 crc kubenswrapper[4728]: I1205 11:15:03.244314 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z7jrv\" (UniqueName: \"kubernetes.io/projected/2abbdcf9-ccd2-4ad9-8ada-590ae997d61a-kube-api-access-z7jrv\") on node \"crc\" DevicePath \"\""
Dec 05 11:15:03 crc kubenswrapper[4728]: I1205 11:15:03.244325 4728 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2abbdcf9-ccd2-4ad9-8ada-590ae997d61a-secret-volume\") on node \"crc\" DevicePath \"\""
Dec 05 11:15:03 crc kubenswrapper[4728]: I1205 11:15:03.715807 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415555-lpg6c" event={"ID":"2abbdcf9-ccd2-4ad9-8ada-590ae997d61a","Type":"ContainerDied","Data":"3e09ac93c3307f6745553ed45e24bb58e37f2a4d0111fc94a800fb020ddf67ae"}
Dec 05 11:15:03 crc kubenswrapper[4728]: I1205 11:15:03.715846 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3e09ac93c3307f6745553ed45e24bb58e37f2a4d0111fc94a800fb020ddf67ae"
Dec 05 11:15:03 crc kubenswrapper[4728]: I1205 11:15:03.715877 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415555-lpg6c"
Dec 05 11:16:46 crc kubenswrapper[4728]: I1205 11:16:46.410333 4728 scope.go:117] "RemoveContainer" containerID="c6e728070843e73bbe7f9c6d28fc805ab68b553575998435e221af8d30b7c317"
Dec 05 11:16:55 crc kubenswrapper[4728]: I1205 11:16:55.701962 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 11:16:55 crc kubenswrapper[4728]: I1205 11:16:55.702455 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 11:17:25 crc kubenswrapper[4728]: I1205 11:17:25.701764 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 11:17:25 crc kubenswrapper[4728]: I1205 11:17:25.702556 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 11:17:42 crc kubenswrapper[4728]: I1205 11:17:42.622474 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-6fr26"]
11:17:42.623377 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2abbdcf9-ccd2-4ad9-8ada-590ae997d61a" containerName="collect-profiles" Dec 05 11:17:42 crc kubenswrapper[4728]: I1205 11:17:42.623398 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="2abbdcf9-ccd2-4ad9-8ada-590ae997d61a" containerName="collect-profiles" Dec 05 11:17:42 crc kubenswrapper[4728]: I1205 11:17:42.623545 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="2abbdcf9-ccd2-4ad9-8ada-590ae997d61a" containerName="collect-profiles" Dec 05 11:17:42 crc kubenswrapper[4728]: I1205 11:17:42.624169 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-6fr26" Dec 05 11:17:42 crc kubenswrapper[4728]: I1205 11:17:42.639876 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-6fr26"] Dec 05 11:17:42 crc kubenswrapper[4728]: I1205 11:17:42.781257 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c6c43b76-f64f-44e7-96b4-b7b1380d5455-trusted-ca\") pod \"image-registry-66df7c8f76-6fr26\" (UID: \"c6c43b76-f64f-44e7-96b4-b7b1380d5455\") " pod="openshift-image-registry/image-registry-66df7c8f76-6fr26" Dec 05 11:17:42 crc kubenswrapper[4728]: I1205 11:17:42.781310 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-6fr26\" (UID: \"c6c43b76-f64f-44e7-96b4-b7b1380d5455\") " pod="openshift-image-registry/image-registry-66df7c8f76-6fr26" Dec 05 11:17:42 crc kubenswrapper[4728]: I1205 11:17:42.781339 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/c6c43b76-f64f-44e7-96b4-b7b1380d5455-registry-certificates\") pod \"image-registry-66df7c8f76-6fr26\" (UID: \"c6c43b76-f64f-44e7-96b4-b7b1380d5455\") " pod="openshift-image-registry/image-registry-66df7c8f76-6fr26" Dec 05 11:17:42 crc kubenswrapper[4728]: I1205 11:17:42.781373 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/c6c43b76-f64f-44e7-96b4-b7b1380d5455-ca-trust-extracted\") pod \"image-registry-66df7c8f76-6fr26\" (UID: \"c6c43b76-f64f-44e7-96b4-b7b1380d5455\") " pod="openshift-image-registry/image-registry-66df7c8f76-6fr26" Dec 05 11:17:42 crc kubenswrapper[4728]: I1205 11:17:42.781405 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/c6c43b76-f64f-44e7-96b4-b7b1380d5455-registry-tls\") pod \"image-registry-66df7c8f76-6fr26\" (UID: \"c6c43b76-f64f-44e7-96b4-b7b1380d5455\") " pod="openshift-image-registry/image-registry-66df7c8f76-6fr26" Dec 05 11:17:42 crc kubenswrapper[4728]: I1205 11:17:42.781423 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hqnmj\" (UniqueName: \"kubernetes.io/projected/c6c43b76-f64f-44e7-96b4-b7b1380d5455-kube-api-access-hqnmj\") pod \"image-registry-66df7c8f76-6fr26\" (UID: \"c6c43b76-f64f-44e7-96b4-b7b1380d5455\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-6fr26" Dec 05 11:17:42 crc kubenswrapper[4728]: I1205 11:17:42.781565 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c6c43b76-f64f-44e7-96b4-b7b1380d5455-bound-sa-token\") pod \"image-registry-66df7c8f76-6fr26\" (UID: \"c6c43b76-f64f-44e7-96b4-b7b1380d5455\") " pod="openshift-image-registry/image-registry-66df7c8f76-6fr26" Dec 05 11:17:42 crc kubenswrapper[4728]: I1205 11:17:42.781782 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/c6c43b76-f64f-44e7-96b4-b7b1380d5455-installation-pull-secrets\") pod \"image-registry-66df7c8f76-6fr26\" (UID: \"c6c43b76-f64f-44e7-96b4-b7b1380d5455\") " pod="openshift-image-registry/image-registry-66df7c8f76-6fr26" Dec 05 11:17:42 crc kubenswrapper[4728]: I1205 11:17:42.802278 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-6fr26\" (UID: \"c6c43b76-f64f-44e7-96b4-b7b1380d5455\") " pod="openshift-image-registry/image-registry-66df7c8f76-6fr26" Dec 05 11:17:42 crc kubenswrapper[4728]: I1205 11:17:42.882656 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/c6c43b76-f64f-44e7-96b4-b7b1380d5455-ca-trust-extracted\") pod \"image-registry-66df7c8f76-6fr26\" (UID: \"c6c43b76-f64f-44e7-96b4-b7b1380d5455\") " pod="openshift-image-registry/image-registry-66df7c8f76-6fr26" Dec 05 11:17:42 crc kubenswrapper[4728]: I1205 11:17:42.882713 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/c6c43b76-f64f-44e7-96b4-b7b1380d5455-registry-tls\") pod \"image-registry-66df7c8f76-6fr26\" (UID: \"c6c43b76-f64f-44e7-96b4-b7b1380d5455\") " pod="openshift-image-registry/image-registry-66df7c8f76-6fr26" Dec 05 11:17:42 crc kubenswrapper[4728]: I1205 11:17:42.882731 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hqnmj\" (UniqueName: \"kubernetes.io/projected/c6c43b76-f64f-44e7-96b4-b7b1380d5455-kube-api-access-hqnmj\") pod \"image-registry-66df7c8f76-6fr26\" (UID: \"c6c43b76-f64f-44e7-96b4-b7b1380d5455\") " pod="openshift-image-registry/image-registry-66df7c8f76-6fr26" Dec 05 11:17:42 crc kubenswrapper[4728]: I1205 11:17:42.882753 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c6c43b76-f64f-44e7-96b4-b7b1380d5455-bound-sa-token\") pod \"image-registry-66df7c8f76-6fr26\" (UID: \"c6c43b76-f64f-44e7-96b4-b7b1380d5455\") " pod="openshift-image-registry/image-registry-66df7c8f76-6fr26" Dec 05 11:17:42 crc kubenswrapper[4728]: I1205 11:17:42.882775 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/c6c43b76-f64f-44e7-96b4-b7b1380d5455-installation-pull-secrets\") pod \"image-registry-66df7c8f76-6fr26\" (UID: \"c6c43b76-f64f-44e7-96b4-b7b1380d5455\") " pod="openshift-image-registry/image-registry-66df7c8f76-6fr26" Dec 05 11:17:42 crc kubenswrapper[4728]: I1205 11:17:42.882822 4728 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c6c43b76-f64f-44e7-96b4-b7b1380d5455-trusted-ca\") pod \"image-registry-66df7c8f76-6fr26\" (UID: \"c6c43b76-f64f-44e7-96b4-b7b1380d5455\") " pod="openshift-image-registry/image-registry-66df7c8f76-6fr26" Dec 05 11:17:42 crc kubenswrapper[4728]: I1205 11:17:42.882856 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/c6c43b76-f64f-44e7-96b4-b7b1380d5455-registry-certificates\") pod \"image-registry-66df7c8f76-6fr26\" (UID: \"c6c43b76-f64f-44e7-96b4-b7b1380d5455\") " pod="openshift-image-registry/image-registry-66df7c8f76-6fr26" Dec 05 11:17:42 crc kubenswrapper[4728]: I1205 11:17:42.883430 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/c6c43b76-f64f-44e7-96b4-b7b1380d5455-ca-trust-extracted\") pod \"image-registry-66df7c8f76-6fr26\" (UID: \"c6c43b76-f64f-44e7-96b4-b7b1380d5455\") " pod="openshift-image-registry/image-registry-66df7c8f76-6fr26" Dec 05 11:17:42 crc kubenswrapper[4728]: I1205 11:17:42.884373 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/c6c43b76-f64f-44e7-96b4-b7b1380d5455-registry-certificates\") pod \"image-registry-66df7c8f76-6fr26\" (UID: \"c6c43b76-f64f-44e7-96b4-b7b1380d5455\") " pod="openshift-image-registry/image-registry-66df7c8f76-6fr26" Dec 05 11:17:42 crc kubenswrapper[4728]: I1205 11:17:42.884383 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c6c43b76-f64f-44e7-96b4-b7b1380d5455-trusted-ca\") pod \"image-registry-66df7c8f76-6fr26\" (UID: \"c6c43b76-f64f-44e7-96b4-b7b1380d5455\") " pod="openshift-image-registry/image-registry-66df7c8f76-6fr26" Dec 05 11:17:42 crc kubenswrapper[4728]: I1205 11:17:42.889176 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/c6c43b76-f64f-44e7-96b4-b7b1380d5455-registry-tls\") pod \"image-registry-66df7c8f76-6fr26\" (UID: \"c6c43b76-f64f-44e7-96b4-b7b1380d5455\") " pod="openshift-image-registry/image-registry-66df7c8f76-6fr26" Dec 05 11:17:42 crc kubenswrapper[4728]: I1205 11:17:42.891257 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/c6c43b76-f64f-44e7-96b4-b7b1380d5455-installation-pull-secrets\") pod \"image-registry-66df7c8f76-6fr26\" (UID: \"c6c43b76-f64f-44e7-96b4-b7b1380d5455\") " pod="openshift-image-registry/image-registry-66df7c8f76-6fr26" Dec 05 11:17:42 crc kubenswrapper[4728]: I1205 11:17:42.905639 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hqnmj\" (UniqueName: \"kubernetes.io/projected/c6c43b76-f64f-44e7-96b4-b7b1380d5455-kube-api-access-hqnmj\") pod \"image-registry-66df7c8f76-6fr26\" (UID: \"c6c43b76-f64f-44e7-96b4-b7b1380d5455\") " pod="openshift-image-registry/image-registry-66df7c8f76-6fr26" Dec 05 11:17:42 crc kubenswrapper[4728]: I1205 11:17:42.917235 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c6c43b76-f64f-44e7-96b4-b7b1380d5455-bound-sa-token\") pod \"image-registry-66df7c8f76-6fr26\" (UID: \"c6c43b76-f64f-44e7-96b4-b7b1380d5455\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-6fr26" Dec 05 11:17:42 crc kubenswrapper[4728]: I1205 11:17:42.945266 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-6fr26" Dec 05 11:17:43 crc kubenswrapper[4728]: I1205 11:17:43.136127 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-6fr26"] Dec 05 11:17:43 crc kubenswrapper[4728]: I1205 11:17:43.694630 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-6fr26" event={"ID":"c6c43b76-f64f-44e7-96b4-b7b1380d5455","Type":"ContainerStarted","Data":"feef174c98e37b120ec570d948f74610be9b360d276590c016343e101615045e"} Dec 05 11:17:43 crc kubenswrapper[4728]: I1205 11:17:43.696020 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-6fr26" Dec 05 11:17:43 crc kubenswrapper[4728]: I1205 11:17:43.696051 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-6fr26" event={"ID":"c6c43b76-f64f-44e7-96b4-b7b1380d5455","Type":"ContainerStarted","Data":"a7e3d6881e0b97442316fc397f46778b5e05983d3c6aaebaa5fff584460159a1"} Dec 05 11:17:43 crc kubenswrapper[4728]: I1205 11:17:43.718457 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-6fr26" podStartSLOduration=1.7184385039999999 podStartE2EDuration="1.718438504s" podCreationTimestamp="2025-12-05 11:17:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:17:43.717589359 +0000 UTC m=+597.859712072" watchObservedRunningTime="2025-12-05 11:17:43.718438504 +0000 UTC m=+597.860561197" Dec 05 11:17:55 crc kubenswrapper[4728]: I1205 11:17:55.702620 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 11:17:55 crc kubenswrapper[4728]: I1205 11:17:55.703066 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 11:17:55 crc kubenswrapper[4728]: I1205 11:17:55.703126 4728 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" Dec 05 11:17:55 crc kubenswrapper[4728]: I1205 11:17:55.704046 4728 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e2f21c6c45d9f27ccde3dfcb5a8280ecb9fd8456606573b9fb4860fd32895139"} pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 11:17:55 crc kubenswrapper[4728]: I1205 11:17:55.704152 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" 
containerName="machine-config-daemon" containerID="cri-o://e2f21c6c45d9f27ccde3dfcb5a8280ecb9fd8456606573b9fb4860fd32895139" gracePeriod=600 Dec 05 11:17:56 crc kubenswrapper[4728]: I1205 11:17:56.804189 4728 generic.go:334] "Generic (PLEG): container finished" podID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerID="e2f21c6c45d9f27ccde3dfcb5a8280ecb9fd8456606573b9fb4860fd32895139" exitCode=0 Dec 05 11:17:56 crc kubenswrapper[4728]: I1205 11:17:56.804258 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" event={"ID":"95bfa60b-fcb6-4519-abc5-c25fea50921d","Type":"ContainerDied","Data":"e2f21c6c45d9f27ccde3dfcb5a8280ecb9fd8456606573b9fb4860fd32895139"} Dec 05 11:17:56 crc kubenswrapper[4728]: I1205 11:17:56.804581 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" event={"ID":"95bfa60b-fcb6-4519-abc5-c25fea50921d","Type":"ContainerStarted","Data":"326e3eadf7eef59824ca257b48d5202ca29f557deff0b73b1ca60a1f7865a5b8"} Dec 05 11:17:56 crc kubenswrapper[4728]: I1205 11:17:56.804608 4728 scope.go:117] "RemoveContainer" containerID="e95ef4252beb34711bfa41f21abadded9db33cd1c9b400ad26f4ed3d1cabb0bf" Dec 05 11:18:02 crc kubenswrapper[4728]: I1205 11:18:02.953525 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-6fr26" Dec 05 11:18:03 crc kubenswrapper[4728]: I1205 11:18:03.008327 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-x8nh8"] Dec 05 11:18:28 crc kubenswrapper[4728]: I1205 11:18:28.049559 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" podUID="aec2314f-9290-4eb4-a632-70baf826e29a" containerName="registry" containerID="cri-o://7dbc64c72fcc332a07fa4d42de7e10412f24048a50c530e8c7329dbd1ec154ee" gracePeriod=30 Dec 05 11:18:28 crc kubenswrapper[4728]: I1205 11:18:28.424289 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:18:28 crc kubenswrapper[4728]: I1205 11:18:28.582361 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/aec2314f-9290-4eb4-a632-70baf826e29a-bound-sa-token\") pod \"aec2314f-9290-4eb4-a632-70baf826e29a\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " Dec 05 11:18:28 crc kubenswrapper[4728]: I1205 11:18:28.582667 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"aec2314f-9290-4eb4-a632-70baf826e29a\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " Dec 05 11:18:28 crc kubenswrapper[4728]: I1205 11:18:28.583736 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/aec2314f-9290-4eb4-a632-70baf826e29a-registry-certificates\") pod \"aec2314f-9290-4eb4-a632-70baf826e29a\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " Dec 05 11:18:28 crc kubenswrapper[4728]: I1205 11:18:28.583773 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/aec2314f-9290-4eb4-a632-70baf826e29a-trusted-ca\") pod \"aec2314f-9290-4eb4-a632-70baf826e29a\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " Dec 05 11:18:28 crc kubenswrapper[4728]: I1205 11:18:28.583834 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/aec2314f-9290-4eb4-a632-70baf826e29a-registry-tls\") pod \"aec2314f-9290-4eb4-a632-70baf826e29a\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " Dec 05 11:18:28 crc kubenswrapper[4728]: I1205 11:18:28.583864 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/aec2314f-9290-4eb4-a632-70baf826e29a-installation-pull-secrets\") pod \"aec2314f-9290-4eb4-a632-70baf826e29a\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " Dec 05 11:18:28 crc kubenswrapper[4728]: I1205 11:18:28.583888 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bwlsw\" (UniqueName: \"kubernetes.io/projected/aec2314f-9290-4eb4-a632-70baf826e29a-kube-api-access-bwlsw\") pod \"aec2314f-9290-4eb4-a632-70baf826e29a\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " Dec 05 11:18:28 crc kubenswrapper[4728]: I1205 11:18:28.583982 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/aec2314f-9290-4eb4-a632-70baf826e29a-ca-trust-extracted\") pod \"aec2314f-9290-4eb4-a632-70baf826e29a\" (UID: \"aec2314f-9290-4eb4-a632-70baf826e29a\") " Dec 05 11:18:28 crc kubenswrapper[4728]: I1205 11:18:28.596918 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "aec2314f-9290-4eb4-a632-70baf826e29a" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 05 11:18:28 crc kubenswrapper[4728]: I1205 11:18:28.597719 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aec2314f-9290-4eb4-a632-70baf826e29a-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "aec2314f-9290-4eb4-a632-70baf826e29a" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:18:28 crc kubenswrapper[4728]: I1205 11:18:28.599041 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aec2314f-9290-4eb4-a632-70baf826e29a-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "aec2314f-9290-4eb4-a632-70baf826e29a" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:18:28 crc kubenswrapper[4728]: I1205 11:18:28.599191 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aec2314f-9290-4eb4-a632-70baf826e29a-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "aec2314f-9290-4eb4-a632-70baf826e29a" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:18:28 crc kubenswrapper[4728]: I1205 11:18:28.599310 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aec2314f-9290-4eb4-a632-70baf826e29a-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "aec2314f-9290-4eb4-a632-70baf826e29a" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:18:28 crc kubenswrapper[4728]: I1205 11:18:28.599879 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aec2314f-9290-4eb4-a632-70baf826e29a-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "aec2314f-9290-4eb4-a632-70baf826e29a" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:18:28 crc kubenswrapper[4728]: I1205 11:18:28.600338 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aec2314f-9290-4eb4-a632-70baf826e29a-kube-api-access-bwlsw" (OuterVolumeSpecName: "kube-api-access-bwlsw") pod "aec2314f-9290-4eb4-a632-70baf826e29a" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a"). InnerVolumeSpecName "kube-api-access-bwlsw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:18:28 crc kubenswrapper[4728]: I1205 11:18:28.613451 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aec2314f-9290-4eb4-a632-70baf826e29a-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "aec2314f-9290-4eb4-a632-70baf826e29a" (UID: "aec2314f-9290-4eb4-a632-70baf826e29a"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:18:28 crc kubenswrapper[4728]: I1205 11:18:28.696303 4728 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/aec2314f-9290-4eb4-a632-70baf826e29a-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 05 11:18:28 crc kubenswrapper[4728]: I1205 11:18:28.696353 4728 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/aec2314f-9290-4eb4-a632-70baf826e29a-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 05 11:18:28 crc kubenswrapper[4728]: I1205 11:18:28.696367 4728 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/aec2314f-9290-4eb4-a632-70baf826e29a-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 05 11:18:28 crc kubenswrapper[4728]: I1205 11:18:28.696381 4728 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/aec2314f-9290-4eb4-a632-70baf826e29a-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 11:18:28 crc kubenswrapper[4728]: I1205 11:18:28.696392 4728 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/aec2314f-9290-4eb4-a632-70baf826e29a-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 05 11:18:28 crc kubenswrapper[4728]: I1205 11:18:28.696406 4728 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/aec2314f-9290-4eb4-a632-70baf826e29a-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 05 11:18:28 crc kubenswrapper[4728]: I1205 11:18:28.696417 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bwlsw\" (UniqueName: \"kubernetes.io/projected/aec2314f-9290-4eb4-a632-70baf826e29a-kube-api-access-bwlsw\") on node \"crc\" DevicePath \"\"" Dec 05 11:18:28 crc kubenswrapper[4728]: I1205 11:18:28.977975 4728 generic.go:334] "Generic (PLEG): container finished" podID="aec2314f-9290-4eb4-a632-70baf826e29a" containerID="7dbc64c72fcc332a07fa4d42de7e10412f24048a50c530e8c7329dbd1ec154ee" exitCode=0 Dec 05 11:18:28 crc kubenswrapper[4728]: I1205 11:18:28.978041 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" event={"ID":"aec2314f-9290-4eb4-a632-70baf826e29a","Type":"ContainerDied","Data":"7dbc64c72fcc332a07fa4d42de7e10412f24048a50c530e8c7329dbd1ec154ee"} Dec 05 11:18:28 crc kubenswrapper[4728]: I1205 11:18:28.978100 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" Dec 05 11:18:28 crc kubenswrapper[4728]: I1205 11:18:28.978311 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-x8nh8" event={"ID":"aec2314f-9290-4eb4-a632-70baf826e29a","Type":"ContainerDied","Data":"957e3ba0aa51d3d0bd4be462148604d25bd8feebccea184e06abc84531fafd96"} Dec 05 11:18:28 crc kubenswrapper[4728]: I1205 11:18:28.978325 4728 scope.go:117] "RemoveContainer" containerID="7dbc64c72fcc332a07fa4d42de7e10412f24048a50c530e8c7329dbd1ec154ee" Dec 05 11:18:29 crc kubenswrapper[4728]: I1205 11:18:29.008116 4728 scope.go:117] "RemoveContainer" containerID="7dbc64c72fcc332a07fa4d42de7e10412f24048a50c530e8c7329dbd1ec154ee" Dec 05 11:18:29 crc kubenswrapper[4728]: E1205 11:18:29.008875 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7dbc64c72fcc332a07fa4d42de7e10412f24048a50c530e8c7329dbd1ec154ee\": container with ID starting with 7dbc64c72fcc332a07fa4d42de7e10412f24048a50c530e8c7329dbd1ec154ee not found: ID does not exist" containerID="7dbc64c72fcc332a07fa4d42de7e10412f24048a50c530e8c7329dbd1ec154ee" Dec 05 11:18:29 crc kubenswrapper[4728]: I1205 11:18:29.009030 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7dbc64c72fcc332a07fa4d42de7e10412f24048a50c530e8c7329dbd1ec154ee"} err="failed to get container status \"7dbc64c72fcc332a07fa4d42de7e10412f24048a50c530e8c7329dbd1ec154ee\": rpc error: code = NotFound desc = could not find container \"7dbc64c72fcc332a07fa4d42de7e10412f24048a50c530e8c7329dbd1ec154ee\": container with ID starting with 7dbc64c72fcc332a07fa4d42de7e10412f24048a50c530e8c7329dbd1ec154ee not found: ID does not exist" Dec 05 11:18:29 crc kubenswrapper[4728]: I1205 11:18:29.013543 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-x8nh8"] Dec 05 11:18:29 crc kubenswrapper[4728]: I1205 11:18:29.020490 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-x8nh8"] Dec 05 11:18:30 crc kubenswrapper[4728]: I1205 11:18:30.365506 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aec2314f-9290-4eb4-a632-70baf826e29a" path="/var/lib/kubelet/pods/aec2314f-9290-4eb4-a632-70baf826e29a/volumes" Dec 05 11:20:18 crc kubenswrapper[4728]: I1205 11:20:18.902825 4728 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 05 11:20:25 crc kubenswrapper[4728]: I1205 11:20:25.702834 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 11:20:25 crc kubenswrapper[4728]: I1205 11:20:25.703535 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 11:20:39 crc kubenswrapper[4728]: I1205 11:20:39.818043 4728 kubelet.go:2421] "SyncLoop ADD" source="api" 
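
The two liveness failures above (11:17:55 and 11:20:25) show the kubelet probing http://127.0.0.1:8798/health and, once the failure threshold is crossed, restarting the container ("failed liveness probe, will be restarted"). A probe producing this kubelet behavior would look roughly like the sketch below; host, path, and port are taken from the log output, while the timing and threshold values are assumptions, not read from this pod's spec:

    // liveness.go: sketch of an HTTP liveness probe matching the probe
    // URL seen in the log (GET http://127.0.0.1:8798/health).
    package main

    import (
    	"fmt"

    	corev1 "k8s.io/api/core/v1"
    	"k8s.io/apimachinery/pkg/util/intstr"
    )

    func main() {
    	probe := &corev1.Probe{
    		ProbeHandler: corev1.ProbeHandler{
    			HTTPGet: &corev1.HTTPGetAction{
    				Host: "127.0.0.1", // from the probe URL in the log
    				Path: "/health",   // from the probe URL in the log
    				Port: intstr.FromInt(8798),
    			},
    		},
    		PeriodSeconds:    30, // assumed; not visible in the log
    		FailureThreshold: 3,  // assumed; not visible in the log
    	}
    	fmt.Printf("%+v\n", probe)
    }
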
pods=["cert-manager/cert-manager-cainjector-7f985d654d-zxdgf"] Dec 05 11:20:39 crc kubenswrapper[4728]: E1205 11:20:39.818714 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aec2314f-9290-4eb4-a632-70baf826e29a" containerName="registry" Dec 05 11:20:39 crc kubenswrapper[4728]: I1205 11:20:39.818725 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="aec2314f-9290-4eb4-a632-70baf826e29a" containerName="registry" Dec 05 11:20:39 crc kubenswrapper[4728]: I1205 11:20:39.818862 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="aec2314f-9290-4eb4-a632-70baf826e29a" containerName="registry" Dec 05 11:20:39 crc kubenswrapper[4728]: I1205 11:20:39.819316 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-zxdgf" Dec 05 11:20:39 crc kubenswrapper[4728]: I1205 11:20:39.822664 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Dec 05 11:20:39 crc kubenswrapper[4728]: I1205 11:20:39.822784 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Dec 05 11:20:39 crc kubenswrapper[4728]: I1205 11:20:39.823062 4728 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-kxvr4" Dec 05 11:20:39 crc kubenswrapper[4728]: I1205 11:20:39.831857 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-hhc56"] Dec 05 11:20:39 crc kubenswrapper[4728]: I1205 11:20:39.832682 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-hhc56" Dec 05 11:20:39 crc kubenswrapper[4728]: I1205 11:20:39.838053 4728 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-46ddg" Dec 05 11:20:39 crc kubenswrapper[4728]: I1205 11:20:39.838147 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-fb8dg"] Dec 05 11:20:39 crc kubenswrapper[4728]: I1205 11:20:39.838855 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-fb8dg" Dec 05 11:20:39 crc kubenswrapper[4728]: I1205 11:20:39.841576 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-zxdgf"] Dec 05 11:20:39 crc kubenswrapper[4728]: I1205 11:20:39.843679 4728 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-qzrvn" Dec 05 11:20:39 crc kubenswrapper[4728]: I1205 11:20:39.849398 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-hhc56"] Dec 05 11:20:39 crc kubenswrapper[4728]: I1205 11:20:39.859620 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-fb8dg"] Dec 05 11:20:39 crc kubenswrapper[4728]: I1205 11:20:39.952647 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k4zw6\" (UniqueName: \"kubernetes.io/projected/21cf202a-ede4-4ba9-9180-2dcde628cd09-kube-api-access-k4zw6\") pod \"cert-manager-cainjector-7f985d654d-zxdgf\" (UID: \"21cf202a-ede4-4ba9-9180-2dcde628cd09\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-zxdgf" Dec 05 11:20:39 crc kubenswrapper[4728]: I1205 11:20:39.952708 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z8nc9\" (UniqueName: \"kubernetes.io/projected/159ff938-2eac-4774-beeb-18122124ceef-kube-api-access-z8nc9\") pod \"cert-manager-5b446d88c5-hhc56\" (UID: \"159ff938-2eac-4774-beeb-18122124ceef\") " pod="cert-manager/cert-manager-5b446d88c5-hhc56" Dec 05 11:20:39 crc kubenswrapper[4728]: I1205 11:20:39.952746 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vwcq5\" (UniqueName: \"kubernetes.io/projected/2cf07d8e-ad99-47a1-b4b7-0b37e78f81f0-kube-api-access-vwcq5\") pod \"cert-manager-webhook-5655c58dd6-fb8dg\" (UID: \"2cf07d8e-ad99-47a1-b4b7-0b37e78f81f0\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-fb8dg" Dec 05 11:20:40 crc kubenswrapper[4728]: I1205 11:20:40.053610 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vwcq5\" (UniqueName: \"kubernetes.io/projected/2cf07d8e-ad99-47a1-b4b7-0b37e78f81f0-kube-api-access-vwcq5\") pod \"cert-manager-webhook-5655c58dd6-fb8dg\" (UID: \"2cf07d8e-ad99-47a1-b4b7-0b37e78f81f0\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-fb8dg" Dec 05 11:20:40 crc kubenswrapper[4728]: I1205 11:20:40.053717 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k4zw6\" (UniqueName: \"kubernetes.io/projected/21cf202a-ede4-4ba9-9180-2dcde628cd09-kube-api-access-k4zw6\") pod \"cert-manager-cainjector-7f985d654d-zxdgf\" (UID: \"21cf202a-ede4-4ba9-9180-2dcde628cd09\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-zxdgf" Dec 05 11:20:40 crc kubenswrapper[4728]: I1205 11:20:40.053766 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z8nc9\" (UniqueName: \"kubernetes.io/projected/159ff938-2eac-4774-beeb-18122124ceef-kube-api-access-z8nc9\") pod \"cert-manager-5b446d88c5-hhc56\" (UID: \"159ff938-2eac-4774-beeb-18122124ceef\") " pod="cert-manager/cert-manager-5b446d88c5-hhc56" Dec 05 11:20:40 crc kubenswrapper[4728]: I1205 11:20:40.072557 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vwcq5\" (UniqueName: 
\"kubernetes.io/projected/2cf07d8e-ad99-47a1-b4b7-0b37e78f81f0-kube-api-access-vwcq5\") pod \"cert-manager-webhook-5655c58dd6-fb8dg\" (UID: \"2cf07d8e-ad99-47a1-b4b7-0b37e78f81f0\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-fb8dg" Dec 05 11:20:40 crc kubenswrapper[4728]: I1205 11:20:40.075853 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k4zw6\" (UniqueName: \"kubernetes.io/projected/21cf202a-ede4-4ba9-9180-2dcde628cd09-kube-api-access-k4zw6\") pod \"cert-manager-cainjector-7f985d654d-zxdgf\" (UID: \"21cf202a-ede4-4ba9-9180-2dcde628cd09\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-zxdgf" Dec 05 11:20:40 crc kubenswrapper[4728]: I1205 11:20:40.084883 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z8nc9\" (UniqueName: \"kubernetes.io/projected/159ff938-2eac-4774-beeb-18122124ceef-kube-api-access-z8nc9\") pod \"cert-manager-5b446d88c5-hhc56\" (UID: \"159ff938-2eac-4774-beeb-18122124ceef\") " pod="cert-manager/cert-manager-5b446d88c5-hhc56" Dec 05 11:20:40 crc kubenswrapper[4728]: I1205 11:20:40.146226 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-zxdgf" Dec 05 11:20:40 crc kubenswrapper[4728]: I1205 11:20:40.164173 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-hhc56" Dec 05 11:20:40 crc kubenswrapper[4728]: I1205 11:20:40.174051 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-fb8dg" Dec 05 11:20:40 crc kubenswrapper[4728]: I1205 11:20:40.450024 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-fb8dg"] Dec 05 11:20:40 crc kubenswrapper[4728]: I1205 11:20:40.459655 4728 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 11:20:40 crc kubenswrapper[4728]: I1205 11:20:40.558213 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-zxdgf"] Dec 05 11:20:40 crc kubenswrapper[4728]: I1205 11:20:40.613241 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-hhc56"] Dec 05 11:20:40 crc kubenswrapper[4728]: W1205 11:20:40.618479 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod159ff938_2eac_4774_beeb_18122124ceef.slice/crio-489503ac9e74d6abd06b0c506d7071ec2680a09bd2ae70acb9d1935e543804ca WatchSource:0}: Error finding container 489503ac9e74d6abd06b0c506d7071ec2680a09bd2ae70acb9d1935e543804ca: Status 404 returned error can't find the container with id 489503ac9e74d6abd06b0c506d7071ec2680a09bd2ae70acb9d1935e543804ca Dec 05 11:20:40 crc kubenswrapper[4728]: I1205 11:20:40.835693 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-hhc56" event={"ID":"159ff938-2eac-4774-beeb-18122124ceef","Type":"ContainerStarted","Data":"489503ac9e74d6abd06b0c506d7071ec2680a09bd2ae70acb9d1935e543804ca"} Dec 05 11:20:40 crc kubenswrapper[4728]: I1205 11:20:40.837008 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-zxdgf" event={"ID":"21cf202a-ede4-4ba9-9180-2dcde628cd09","Type":"ContainerStarted","Data":"b4b96a9fd5c1abb958d2f2220219908d1ab1d488046bd81b407efe680771b1c2"} Dec 05 11:20:40 crc 
kubenswrapper[4728]: I1205 11:20:40.838121 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-fb8dg" event={"ID":"2cf07d8e-ad99-47a1-b4b7-0b37e78f81f0","Type":"ContainerStarted","Data":"b86b5c809df012d277610fa2e2e06577c0c02a380f8f4db5cda9ab316d82d2f2"} Dec 05 11:20:43 crc kubenswrapper[4728]: I1205 11:20:43.855858 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-fb8dg" event={"ID":"2cf07d8e-ad99-47a1-b4b7-0b37e78f81f0","Type":"ContainerStarted","Data":"ed2e50f2c236e3ec65348a18f7914dbb91f30a00e9fe40c97e6e64ea52c38c36"} Dec 05 11:20:43 crc kubenswrapper[4728]: I1205 11:20:43.856906 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-fb8dg" Dec 05 11:20:43 crc kubenswrapper[4728]: I1205 11:20:43.857530 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-hhc56" event={"ID":"159ff938-2eac-4774-beeb-18122124ceef","Type":"ContainerStarted","Data":"62bf40eb51d37f8f2e7af9e6d55293797a25c32b21d2ceb3547a89cf74753bb8"} Dec 05 11:20:43 crc kubenswrapper[4728]: I1205 11:20:43.858804 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-zxdgf" event={"ID":"21cf202a-ede4-4ba9-9180-2dcde628cd09","Type":"ContainerStarted","Data":"b06f0b5f93e6ced7d9614ed81afe1a9a21ca889154cadfea528083b9220ea0f2"} Dec 05 11:20:43 crc kubenswrapper[4728]: I1205 11:20:43.869101 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-fb8dg" podStartSLOduration=2.089405891 podStartE2EDuration="4.869082346s" podCreationTimestamp="2025-12-05 11:20:39 +0000 UTC" firstStartedPulling="2025-12-05 11:20:40.459385945 +0000 UTC m=+774.601508638" lastFinishedPulling="2025-12-05 11:20:43.2390624 +0000 UTC m=+777.381185093" observedRunningTime="2025-12-05 11:20:43.868894621 +0000 UTC m=+778.011017324" watchObservedRunningTime="2025-12-05 11:20:43.869082346 +0000 UTC m=+778.011205059" Dec 05 11:20:43 crc kubenswrapper[4728]: I1205 11:20:43.885608 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-hhc56" podStartSLOduration=2.267224129 podStartE2EDuration="4.885588552s" podCreationTimestamp="2025-12-05 11:20:39 +0000 UTC" firstStartedPulling="2025-12-05 11:20:40.622177538 +0000 UTC m=+774.764300231" lastFinishedPulling="2025-12-05 11:20:43.240541941 +0000 UTC m=+777.382664654" observedRunningTime="2025-12-05 11:20:43.883249168 +0000 UTC m=+778.025371861" watchObservedRunningTime="2025-12-05 11:20:43.885588552 +0000 UTC m=+778.027711245" Dec 05 11:20:49 crc kubenswrapper[4728]: I1205 11:20:49.928451 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-zxdgf" podStartSLOduration=8.013032608 podStartE2EDuration="10.928432716s" podCreationTimestamp="2025-12-05 11:20:39 +0000 UTC" firstStartedPulling="2025-12-05 11:20:40.566886809 +0000 UTC m=+774.709009502" lastFinishedPulling="2025-12-05 11:20:43.482286917 +0000 UTC m=+777.624409610" observedRunningTime="2025-12-05 11:20:43.899270261 +0000 UTC m=+778.041392954" watchObservedRunningTime="2025-12-05 11:20:49.928432716 +0000 UTC m=+784.070555419" Dec 05 11:20:49 crc kubenswrapper[4728]: I1205 11:20:49.932059 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-wchlf"] Dec 05 11:20:49 
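
The "SyncLoop DELETE" above is followed below by the kubelet killing each container of ovnkube-node-wchlf with "Killing container with a grace period" and gracePeriod=30: the grace period attached to the API deletion is what the kubelet applies per container. A deletion with an explicit grace period can be issued like this sketch (kubeconfig path assumed; pod and namespace taken from the log):

    // delete.go: delete a pod with an explicit grace period; the kubelet
    // then kills the pod's containers with that grace period, as logged below.
    package main

    import (
    	"context"

    	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
    	"k8s.io/client-go/kubernetes"
    	"k8s.io/client-go/tools/clientcmd"
    )

    func main() {
    	cfg, err := clientcmd.BuildConfigFromFlags("", "/path/to/kubeconfig") // assumed path
    	if err != nil {
    		panic(err)
    	}
    	cs, err := kubernetes.NewForConfig(cfg)
    	if err != nil {
    		panic(err)
    	}
    	grace := int64(30) // matches gracePeriod=30 in the entries below
    	if err := cs.CoreV1().Pods("openshift-ovn-kubernetes").Delete(
    		context.TODO(), "ovnkube-node-wchlf",
    		metav1.DeleteOptions{GracePeriodSeconds: &grace}); err != nil {
    		panic(err)
    	}
    }
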
Dec 05 11:20:49 crc kubenswrapper[4728]: I1205 11:20:49.932453 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" podUID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" containerName="sbdb" containerID="cri-o://b9be8133cc8e8e518cd8a6a3ace93814e8d565d8bea2b81433ba07281ade2f3f" gracePeriod=30
Dec 05 11:20:49 crc kubenswrapper[4728]: I1205 11:20:49.932529 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" podUID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" containerName="nbdb" containerID="cri-o://4aacb2f8e3ec748166a587a5c2c7c026382073d9e3b8e20caf4dc444e9cc70c6" gracePeriod=30
Dec 05 11:20:49 crc kubenswrapper[4728]: I1205 11:20:49.932405 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" podUID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" containerName="ovn-controller" containerID="cri-o://02bc0b9bc6ed08d2162af49b31f5e66ef70f78723e4016f6be5905d1227201b7" gracePeriod=30
Dec 05 11:20:49 crc kubenswrapper[4728]: I1205 11:20:49.932555 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" podUID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" containerName="ovn-acl-logging" containerID="cri-o://6d488533307095d5c0efc56c8f32bac388226d2d5763c5c31db03c84585899f2" gracePeriod=30
Dec 05 11:20:49 crc kubenswrapper[4728]: I1205 11:20:49.932613 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" podUID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://d82bd221438311b8ba34cfc26e43e590743b257ad91407befb8f4d2e17f7ba55" gracePeriod=30
Dec 05 11:20:49 crc kubenswrapper[4728]: I1205 11:20:49.932980 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" podUID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" containerName="northd" containerID="cri-o://19041ddafe3e2fe2bb95577c9bc5e372ba443790469deb4f36634d3690a61b0b" gracePeriod=30
Dec 05 11:20:49 crc kubenswrapper[4728]: I1205 11:20:49.934071 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" podUID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" containerName="kube-rbac-proxy-node" containerID="cri-o://62ccfb6861d12cec3082bfe61fc1976dc0889398539797aa62979b632e74c9ff" gracePeriod=30
Dec 05 11:20:49 crc kubenswrapper[4728]: I1205 11:20:49.963501 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" podUID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" containerName="ovnkube-controller" containerID="cri-o://729b66dbb2cc4e235ea8209988e47495481e1acd5dc9b695c226c4c572a0b841" gracePeriod=30
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.176723 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-fb8dg"
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.212573 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-wchlf_1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5/ovnkube-controller/3.log"
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.215308 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-wchlf_1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5/ovn-acl-logging/0.log"
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.215888 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-wchlf_1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5/ovn-controller/0.log"
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.216883 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf"
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.264269 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-lv4vv"]
Dec 05 11:20:50 crc kubenswrapper[4728]: E1205 11:20:50.264489 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" containerName="ovnkube-controller"
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.264500 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" containerName="ovnkube-controller"
Dec 05 11:20:50 crc kubenswrapper[4728]: E1205 11:20:50.264511 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" containerName="ovnkube-controller"
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.264517 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" containerName="ovnkube-controller"
Dec 05 11:20:50 crc kubenswrapper[4728]: E1205 11:20:50.264525 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" containerName="ovnkube-controller"
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.264531 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" containerName="ovnkube-controller"
Dec 05 11:20:50 crc kubenswrapper[4728]: E1205 11:20:50.264538 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" containerName="ovn-controller"
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.264544 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" containerName="ovn-controller"
Dec 05 11:20:50 crc kubenswrapper[4728]: E1205 11:20:50.264551 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" containerName="sbdb"
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.264558 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" containerName="sbdb"
Dec 05 11:20:50 crc kubenswrapper[4728]: E1205 11:20:50.264570 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" containerName="ovn-acl-logging"
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.264577 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" containerName="ovn-acl-logging"
Dec 05 11:20:50 crc kubenswrapper[4728]: E1205 11:20:50.264588 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" containerName="kube-rbac-proxy-node"
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.264593 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" containerName="kube-rbac-proxy-node"
Dec 05 11:20:50 crc kubenswrapper[4728]: E1205 11:20:50.264602 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" containerName="kube-rbac-proxy-ovn-metrics"
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.264608 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" containerName="kube-rbac-proxy-ovn-metrics"
Dec 05 11:20:50 crc kubenswrapper[4728]: E1205 11:20:50.264614 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" containerName="ovnkube-controller"
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.264619 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" containerName="ovnkube-controller"
Dec 05 11:20:50 crc kubenswrapper[4728]: E1205 11:20:50.264630 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" containerName="nbdb"
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.264635 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" containerName="nbdb"
Dec 05 11:20:50 crc kubenswrapper[4728]: E1205 11:20:50.264640 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" containerName="northd"
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.264646 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" containerName="northd"
Dec 05 11:20:50 crc kubenswrapper[4728]: E1205 11:20:50.264655 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" containerName="kubecfg-setup"
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.264660 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" containerName="kubecfg-setup"
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.264741 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" containerName="ovnkube-controller"
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.264749 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" containerName="kube-rbac-proxy-ovn-metrics"
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.264756 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" containerName="ovnkube-controller"
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.264764 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" containerName="ovn-controller"
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.264773 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" containerName="kube-rbac-proxy-node"
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.264781 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" containerName="ovn-acl-logging"
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.264805 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" containerName="northd"
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.264815 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" containerName="nbdb"
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.264820 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" containerName="ovnkube-controller"
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.264828 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" containerName="sbdb"
Dec 05 11:20:50 crc kubenswrapper[4728]: E1205 11:20:50.264911 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" containerName="ovnkube-controller"
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.264919 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" containerName="ovnkube-controller"
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.265003 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" containerName="ovnkube-controller"
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.265013 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" containerName="ovnkube-controller"
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.266417 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv"
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.383084 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-ovnkube-config\") pod \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") "
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.383138 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-node-log\") pod \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") "
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.383153 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-host-run-ovn-kubernetes\") pod \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") "
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.383180 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-run-openvswitch\") pod \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") "
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.383197 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-host-kubelet\") pod \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") "
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.383213 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-run-systemd\") pod \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") "
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.383235 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nv6h7\" (UniqueName: \"kubernetes.io/projected/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-kube-api-access-nv6h7\") pod \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") "
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.383248 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-systemd-units\") pod \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") "
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.383267 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-host-run-netns\") pod \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") "
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.383284 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-ovnkube-script-lib\") pod \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") "
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.383301 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-host-cni-netd\") pod \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") "
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.383317 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-var-lib-openvswitch\") pod \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") "
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.383335 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-run-ovn\") pod \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") "
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.383317 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" (UID: "1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.383356 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-env-overrides\") pod \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") "
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.383357 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" (UID: "1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.383384 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-log-socket\") pod \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") "
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.383420 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-log-socket" (OuterVolumeSpecName: "log-socket") pod "1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" (UID: "1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.383457 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-host-slash\") pod \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") "
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.383452 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" (UID: "1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.383470 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" (UID: "1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.383501 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" (UID: "1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.383485 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" (UID: "1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.383466 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" (UID: "1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.383488 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" (UID: "1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.383531 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-host-slash" (OuterVolumeSpecName: "host-slash") pod "1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" (UID: "1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.383531 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" (UID: "1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.383484 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-host-cni-bin\") pod \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") "
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.383568 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-node-log" (OuterVolumeSpecName: "node-log") pod "1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" (UID: "1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.383606 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-etc-openvswitch\") pod \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") "
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.383647 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-host-var-lib-cni-networks-ovn-kubernetes\") pod \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") "
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.383655 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" (UID: "1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.383706 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-ovn-node-metrics-cert\") pod \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\" (UID: \"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5\") "
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.383745 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" (UID: "1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.383897 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" (UID: "1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.383897 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/8fe18459-be14-4e98-a84d-1c4d6a158819-env-overrides\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv"
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.383921 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" (UID: "1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.383936 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-55rw6\" (UniqueName: \"kubernetes.io/projected/8fe18459-be14-4e98-a84d-1c4d6a158819-kube-api-access-55rw6\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv"
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.383972 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8fe18459-be14-4e98-a84d-1c4d6a158819-host-cni-bin\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv"
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.383999 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/8fe18459-be14-4e98-a84d-1c4d6a158819-ovn-node-metrics-cert\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv"
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.384018 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/8fe18459-be14-4e98-a84d-1c4d6a158819-ovnkube-script-lib\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv"
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.384069 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/8fe18459-be14-4e98-a84d-1c4d6a158819-ovnkube-config\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv"
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.384089 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8fe18459-be14-4e98-a84d-1c4d6a158819-host-run-netns\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv"
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.384104 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8fe18459-be14-4e98-a84d-1c4d6a158819-var-lib-openvswitch\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv"
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.384120 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/8fe18459-be14-4e98-a84d-1c4d6a158819-log-socket\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv"
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.384150 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/8fe18459-be14-4e98-a84d-1c4d6a158819-host-slash\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv"
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.384190 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8fe18459-be14-4e98-a84d-1c4d6a158819-run-openvswitch\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv"
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.384207 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/8fe18459-be14-4e98-a84d-1c4d6a158819-host-kubelet\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv"
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.384233 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/8fe18459-be14-4e98-a84d-1c4d6a158819-run-systemd\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv"
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.384251 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8fe18459-be14-4e98-a84d-1c4d6a158819-host-run-ovn-kubernetes\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv"
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.384269 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/8fe18459-be14-4e98-a84d-1c4d6a158819-systemd-units\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv"
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.384312 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8fe18459-be14-4e98-a84d-1c4d6a158819-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv"
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.384359 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/8fe18459-be14-4e98-a84d-1c4d6a158819-run-ovn\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv"
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.384375 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/8fe18459-be14-4e98-a84d-1c4d6a158819-host-cni-netd\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv"
Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.384388 4728
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" (UID: "1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.384409 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/8fe18459-be14-4e98-a84d-1c4d6a158819-node-log\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.384441 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8fe18459-be14-4e98-a84d-1c4d6a158819-etc-openvswitch\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.384500 4728 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.384512 4728 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-node-log\") on node \"crc\" DevicePath \"\"" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.384520 4728 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.384529 4728 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-run-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.384537 4728 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-host-kubelet\") on node \"crc\" DevicePath \"\"" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.384544 4728 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-systemd-units\") on node \"crc\" DevicePath \"\"" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.384552 4728 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-host-run-netns\") on node \"crc\" DevicePath \"\"" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.384559 4728 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.384567 4728 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: 
\"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-host-cni-netd\") on node \"crc\" DevicePath \"\"" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.384575 4728 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.384584 4728 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.384592 4728 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.384600 4728 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-log-socket\") on node \"crc\" DevicePath \"\"" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.384609 4728 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-host-slash\") on node \"crc\" DevicePath \"\"" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.384616 4728 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-host-cni-bin\") on node \"crc\" DevicePath \"\"" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.384624 4728 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.384632 4728 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.389618 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-kube-api-access-nv6h7" (OuterVolumeSpecName: "kube-api-access-nv6h7") pod "1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" (UID: "1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5"). InnerVolumeSpecName "kube-api-access-nv6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.390486 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" (UID: "1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.400647 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" (UID: "1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5"). 
InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.485736 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/8fe18459-be14-4e98-a84d-1c4d6a158819-ovnkube-config\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.485829 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8fe18459-be14-4e98-a84d-1c4d6a158819-host-run-netns\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.485853 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8fe18459-be14-4e98-a84d-1c4d6a158819-var-lib-openvswitch\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.485877 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/8fe18459-be14-4e98-a84d-1c4d6a158819-log-socket\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.485903 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/8fe18459-be14-4e98-a84d-1c4d6a158819-host-slash\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.485929 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8fe18459-be14-4e98-a84d-1c4d6a158819-run-openvswitch\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.485957 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/8fe18459-be14-4e98-a84d-1c4d6a158819-host-kubelet\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.485956 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8fe18459-be14-4e98-a84d-1c4d6a158819-host-run-netns\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.485989 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/8fe18459-be14-4e98-a84d-1c4d6a158819-run-systemd\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" Dec 05 11:20:50 crc 
kubenswrapper[4728]: I1205 11:20:50.485993 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8fe18459-be14-4e98-a84d-1c4d6a158819-var-lib-openvswitch\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.486036 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/8fe18459-be14-4e98-a84d-1c4d6a158819-run-systemd\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.486042 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8fe18459-be14-4e98-a84d-1c4d6a158819-host-run-ovn-kubernetes\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.486057 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/8fe18459-be14-4e98-a84d-1c4d6a158819-host-slash\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.486066 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8fe18459-be14-4e98-a84d-1c4d6a158819-run-openvswitch\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.486077 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/8fe18459-be14-4e98-a84d-1c4d6a158819-systemd-units\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.486095 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/8fe18459-be14-4e98-a84d-1c4d6a158819-log-socket\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.486098 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/8fe18459-be14-4e98-a84d-1c4d6a158819-host-kubelet\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.486125 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/8fe18459-be14-4e98-a84d-1c4d6a158819-systemd-units\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.486128 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8fe18459-be14-4e98-a84d-1c4d6a158819-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.486147 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8fe18459-be14-4e98-a84d-1c4d6a158819-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.486200 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8fe18459-be14-4e98-a84d-1c4d6a158819-host-run-ovn-kubernetes\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.486202 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/8fe18459-be14-4e98-a84d-1c4d6a158819-run-ovn\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.486217 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/8fe18459-be14-4e98-a84d-1c4d6a158819-run-ovn\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.486234 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/8fe18459-be14-4e98-a84d-1c4d6a158819-host-cni-netd\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.486254 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/8fe18459-be14-4e98-a84d-1c4d6a158819-node-log\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.486288 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8fe18459-be14-4e98-a84d-1c4d6a158819-etc-openvswitch\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.486307 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/8fe18459-be14-4e98-a84d-1c4d6a158819-env-overrides\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.486317 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: 
\"kubernetes.io/host-path/8fe18459-be14-4e98-a84d-1c4d6a158819-host-cni-netd\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.486322 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8fe18459-be14-4e98-a84d-1c4d6a158819-host-cni-bin\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.486341 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8fe18459-be14-4e98-a84d-1c4d6a158819-host-cni-bin\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.486351 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-55rw6\" (UniqueName: \"kubernetes.io/projected/8fe18459-be14-4e98-a84d-1c4d6a158819-kube-api-access-55rw6\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.486363 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/8fe18459-be14-4e98-a84d-1c4d6a158819-node-log\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.486384 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8fe18459-be14-4e98-a84d-1c4d6a158819-etc-openvswitch\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.486400 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/8fe18459-be14-4e98-a84d-1c4d6a158819-ovn-node-metrics-cert\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.486424 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/8fe18459-be14-4e98-a84d-1c4d6a158819-ovnkube-script-lib\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.486496 4728 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.486508 4728 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-run-systemd\") on node \"crc\" DevicePath \"\"" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.486517 4728 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-nv6h7\" (UniqueName: \"kubernetes.io/projected/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5-kube-api-access-nv6h7\") on node \"crc\" DevicePath \"\"" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.486854 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/8fe18459-be14-4e98-a84d-1c4d6a158819-ovnkube-config\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.486927 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/8fe18459-be14-4e98-a84d-1c4d6a158819-env-overrides\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.487244 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/8fe18459-be14-4e98-a84d-1c4d6a158819-ovnkube-script-lib\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.490056 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/8fe18459-be14-4e98-a84d-1c4d6a158819-ovn-node-metrics-cert\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.503387 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-55rw6\" (UniqueName: \"kubernetes.io/projected/8fe18459-be14-4e98-a84d-1c4d6a158819-kube-api-access-55rw6\") pod \"ovnkube-node-lv4vv\" (UID: \"8fe18459-be14-4e98-a84d-1c4d6a158819\") " pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.582171 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" Dec 05 11:20:50 crc kubenswrapper[4728]: W1205 11:20:50.601189 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8fe18459_be14_4e98_a84d_1c4d6a158819.slice/crio-979474eb4f28c791ef7d935d08757f6c476e4d814abfe8d8cbac2c41fcb144f5 WatchSource:0}: Error finding container 979474eb4f28c791ef7d935d08757f6c476e4d814abfe8d8cbac2c41fcb144f5: Status 404 returned error can't find the container with id 979474eb4f28c791ef7d935d08757f6c476e4d814abfe8d8cbac2c41fcb144f5 Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.909347 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-wchlf_1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5/ovnkube-controller/3.log" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.915539 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-wchlf_1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5/ovn-acl-logging/0.log" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.916167 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-wchlf_1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5/ovn-controller/0.log" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.916726 4728 generic.go:334] "Generic (PLEG): container finished" podID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" containerID="729b66dbb2cc4e235ea8209988e47495481e1acd5dc9b695c226c4c572a0b841" exitCode=0 Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.916780 4728 generic.go:334] "Generic (PLEG): container finished" podID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" containerID="b9be8133cc8e8e518cd8a6a3ace93814e8d565d8bea2b81433ba07281ade2f3f" exitCode=0 Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.916838 4728 generic.go:334] "Generic (PLEG): container finished" podID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" containerID="4aacb2f8e3ec748166a587a5c2c7c026382073d9e3b8e20caf4dc444e9cc70c6" exitCode=0 Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.916861 4728 generic.go:334] "Generic (PLEG): container finished" podID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" containerID="19041ddafe3e2fe2bb95577c9bc5e372ba443790469deb4f36634d3690a61b0b" exitCode=0 Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.916885 4728 generic.go:334] "Generic (PLEG): container finished" podID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" containerID="d82bd221438311b8ba34cfc26e43e590743b257ad91407befb8f4d2e17f7ba55" exitCode=0 Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.916893 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" event={"ID":"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5","Type":"ContainerDied","Data":"729b66dbb2cc4e235ea8209988e47495481e1acd5dc9b695c226c4c572a0b841"} Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.916953 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.916988 4728 scope.go:117] "RemoveContainer" containerID="729b66dbb2cc4e235ea8209988e47495481e1acd5dc9b695c226c4c572a0b841" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.916957 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" event={"ID":"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5","Type":"ContainerDied","Data":"b9be8133cc8e8e518cd8a6a3ace93814e8d565d8bea2b81433ba07281ade2f3f"} Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.916903 4728 generic.go:334] "Generic (PLEG): container finished" podID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" containerID="62ccfb6861d12cec3082bfe61fc1976dc0889398539797aa62979b632e74c9ff" exitCode=0 Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.917129 4728 generic.go:334] "Generic (PLEG): container finished" podID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" containerID="6d488533307095d5c0efc56c8f32bac388226d2d5763c5c31db03c84585899f2" exitCode=143 Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.917149 4728 generic.go:334] "Generic (PLEG): container finished" podID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" containerID="02bc0b9bc6ed08d2162af49b31f5e66ef70f78723e4016f6be5905d1227201b7" exitCode=143 Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.917138 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" event={"ID":"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5","Type":"ContainerDied","Data":"4aacb2f8e3ec748166a587a5c2c7c026382073d9e3b8e20caf4dc444e9cc70c6"} Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.917232 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" event={"ID":"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5","Type":"ContainerDied","Data":"19041ddafe3e2fe2bb95577c9bc5e372ba443790469deb4f36634d3690a61b0b"} Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.917259 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" event={"ID":"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5","Type":"ContainerDied","Data":"d82bd221438311b8ba34cfc26e43e590743b257ad91407befb8f4d2e17f7ba55"} Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.917279 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" event={"ID":"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5","Type":"ContainerDied","Data":"62ccfb6861d12cec3082bfe61fc1976dc0889398539797aa62979b632e74c9ff"} Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.917296 4728 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3661d4a688940abf6aa1e25bffdada007e91fa298063659adea831f4432ba21f"} Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.917312 4728 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b9be8133cc8e8e518cd8a6a3ace93814e8d565d8bea2b81433ba07281ade2f3f"} Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.917322 4728 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4aacb2f8e3ec748166a587a5c2c7c026382073d9e3b8e20caf4dc444e9cc70c6"} Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.917332 4728 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"19041ddafe3e2fe2bb95577c9bc5e372ba443790469deb4f36634d3690a61b0b"} Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.917339 4728 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d82bd221438311b8ba34cfc26e43e590743b257ad91407befb8f4d2e17f7ba55"} Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.917345 4728 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"62ccfb6861d12cec3082bfe61fc1976dc0889398539797aa62979b632e74c9ff"} Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.917353 4728 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6d488533307095d5c0efc56c8f32bac388226d2d5763c5c31db03c84585899f2"} Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.917361 4728 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"02bc0b9bc6ed08d2162af49b31f5e66ef70f78723e4016f6be5905d1227201b7"} Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.917368 4728 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079"} Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.917378 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" event={"ID":"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5","Type":"ContainerDied","Data":"6d488533307095d5c0efc56c8f32bac388226d2d5763c5c31db03c84585899f2"} Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.917389 4728 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"729b66dbb2cc4e235ea8209988e47495481e1acd5dc9b695c226c4c572a0b841"} Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.917397 4728 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3661d4a688940abf6aa1e25bffdada007e91fa298063659adea831f4432ba21f"} Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.917405 4728 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b9be8133cc8e8e518cd8a6a3ace93814e8d565d8bea2b81433ba07281ade2f3f"} Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.917411 4728 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4aacb2f8e3ec748166a587a5c2c7c026382073d9e3b8e20caf4dc444e9cc70c6"} Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.917418 4728 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"19041ddafe3e2fe2bb95577c9bc5e372ba443790469deb4f36634d3690a61b0b"} Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.917424 4728 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d82bd221438311b8ba34cfc26e43e590743b257ad91407befb8f4d2e17f7ba55"} Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.917431 4728 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"62ccfb6861d12cec3082bfe61fc1976dc0889398539797aa62979b632e74c9ff"} Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.917437 4728 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"6d488533307095d5c0efc56c8f32bac388226d2d5763c5c31db03c84585899f2"} Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.917444 4728 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"02bc0b9bc6ed08d2162af49b31f5e66ef70f78723e4016f6be5905d1227201b7"} Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.917450 4728 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079"} Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.917460 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" event={"ID":"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5","Type":"ContainerDied","Data":"02bc0b9bc6ed08d2162af49b31f5e66ef70f78723e4016f6be5905d1227201b7"} Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.917469 4728 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"729b66dbb2cc4e235ea8209988e47495481e1acd5dc9b695c226c4c572a0b841"} Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.917476 4728 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3661d4a688940abf6aa1e25bffdada007e91fa298063659adea831f4432ba21f"} Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.917484 4728 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b9be8133cc8e8e518cd8a6a3ace93814e8d565d8bea2b81433ba07281ade2f3f"} Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.917490 4728 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4aacb2f8e3ec748166a587a5c2c7c026382073d9e3b8e20caf4dc444e9cc70c6"} Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.917497 4728 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"19041ddafe3e2fe2bb95577c9bc5e372ba443790469deb4f36634d3690a61b0b"} Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.917504 4728 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d82bd221438311b8ba34cfc26e43e590743b257ad91407befb8f4d2e17f7ba55"} Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.917511 4728 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"62ccfb6861d12cec3082bfe61fc1976dc0889398539797aa62979b632e74c9ff"} Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.917518 4728 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6d488533307095d5c0efc56c8f32bac388226d2d5763c5c31db03c84585899f2"} Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.917524 4728 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"02bc0b9bc6ed08d2162af49b31f5e66ef70f78723e4016f6be5905d1227201b7"} Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.917532 4728 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079"} Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.917541 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-wchlf" event={"ID":"1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5","Type":"ContainerDied","Data":"9e08482aa0ee5a9538ff22bcfff97d286de234fff549b63fca921a612fb714aa"} Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.917553 4728 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"729b66dbb2cc4e235ea8209988e47495481e1acd5dc9b695c226c4c572a0b841"} Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.917561 4728 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3661d4a688940abf6aa1e25bffdada007e91fa298063659adea831f4432ba21f"} Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.917568 4728 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b9be8133cc8e8e518cd8a6a3ace93814e8d565d8bea2b81433ba07281ade2f3f"} Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.917575 4728 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4aacb2f8e3ec748166a587a5c2c7c026382073d9e3b8e20caf4dc444e9cc70c6"} Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.917582 4728 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"19041ddafe3e2fe2bb95577c9bc5e372ba443790469deb4f36634d3690a61b0b"} Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.917590 4728 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d82bd221438311b8ba34cfc26e43e590743b257ad91407befb8f4d2e17f7ba55"} Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.917596 4728 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"62ccfb6861d12cec3082bfe61fc1976dc0889398539797aa62979b632e74c9ff"} Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.917603 4728 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6d488533307095d5c0efc56c8f32bac388226d2d5763c5c31db03c84585899f2"} Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.917609 4728 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"02bc0b9bc6ed08d2162af49b31f5e66ef70f78723e4016f6be5905d1227201b7"} Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.917617 4728 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079"} Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.920575 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gf8np_f292da29-a632-47aa-8bcc-2d999eaa6c11/kube-multus/2.log" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.921540 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gf8np_f292da29-a632-47aa-8bcc-2d999eaa6c11/kube-multus/1.log" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.921600 4728 generic.go:334] "Generic (PLEG): container finished" podID="f292da29-a632-47aa-8bcc-2d999eaa6c11" containerID="e6c8a50a0f4dc19dfaf32a236e434e6304cb2a3a6b07fc252a74f5603ecfd7bf" exitCode=2 Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.921685 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gf8np" 
event={"ID":"f292da29-a632-47aa-8bcc-2d999eaa6c11","Type":"ContainerDied","Data":"e6c8a50a0f4dc19dfaf32a236e434e6304cb2a3a6b07fc252a74f5603ecfd7bf"} Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.921736 4728 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"441e36947749174097676ff62fe93aff6b9689d29fd12af2c1604adcceffa5d4"} Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.922439 4728 scope.go:117] "RemoveContainer" containerID="e6c8a50a0f4dc19dfaf32a236e434e6304cb2a3a6b07fc252a74f5603ecfd7bf" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.928840 4728 generic.go:334] "Generic (PLEG): container finished" podID="8fe18459-be14-4e98-a84d-1c4d6a158819" containerID="90ab195248d20499f8484f1081554acc3870b806b92c7a965103d4c45b38500b" exitCode=0 Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.928882 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" event={"ID":"8fe18459-be14-4e98-a84d-1c4d6a158819","Type":"ContainerDied","Data":"90ab195248d20499f8484f1081554acc3870b806b92c7a965103d4c45b38500b"} Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.928907 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" event={"ID":"8fe18459-be14-4e98-a84d-1c4d6a158819","Type":"ContainerStarted","Data":"979474eb4f28c791ef7d935d08757f6c476e4d814abfe8d8cbac2c41fcb144f5"} Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.947239 4728 scope.go:117] "RemoveContainer" containerID="3661d4a688940abf6aa1e25bffdada007e91fa298063659adea831f4432ba21f" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.977740 4728 scope.go:117] "RemoveContainer" containerID="b9be8133cc8e8e518cd8a6a3ace93814e8d565d8bea2b81433ba07281ade2f3f" Dec 05 11:20:50 crc kubenswrapper[4728]: I1205 11:20:50.999903 4728 scope.go:117] "RemoveContainer" containerID="4aacb2f8e3ec748166a587a5c2c7c026382073d9e3b8e20caf4dc444e9cc70c6" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.008983 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-wchlf"] Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.012952 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-wchlf"] Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.021087 4728 scope.go:117] "RemoveContainer" containerID="19041ddafe3e2fe2bb95577c9bc5e372ba443790469deb4f36634d3690a61b0b" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.036107 4728 scope.go:117] "RemoveContainer" containerID="d82bd221438311b8ba34cfc26e43e590743b257ad91407befb8f4d2e17f7ba55" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.088189 4728 scope.go:117] "RemoveContainer" containerID="62ccfb6861d12cec3082bfe61fc1976dc0889398539797aa62979b632e74c9ff" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.111993 4728 scope.go:117] "RemoveContainer" containerID="6d488533307095d5c0efc56c8f32bac388226d2d5763c5c31db03c84585899f2" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.126141 4728 scope.go:117] "RemoveContainer" containerID="02bc0b9bc6ed08d2162af49b31f5e66ef70f78723e4016f6be5905d1227201b7" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.150841 4728 scope.go:117] "RemoveContainer" containerID="d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.165515 4728 scope.go:117] "RemoveContainer" 
containerID="729b66dbb2cc4e235ea8209988e47495481e1acd5dc9b695c226c4c572a0b841" Dec 05 11:20:51 crc kubenswrapper[4728]: E1205 11:20:51.165970 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"729b66dbb2cc4e235ea8209988e47495481e1acd5dc9b695c226c4c572a0b841\": container with ID starting with 729b66dbb2cc4e235ea8209988e47495481e1acd5dc9b695c226c4c572a0b841 not found: ID does not exist" containerID="729b66dbb2cc4e235ea8209988e47495481e1acd5dc9b695c226c4c572a0b841" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.166006 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"729b66dbb2cc4e235ea8209988e47495481e1acd5dc9b695c226c4c572a0b841"} err="failed to get container status \"729b66dbb2cc4e235ea8209988e47495481e1acd5dc9b695c226c4c572a0b841\": rpc error: code = NotFound desc = could not find container \"729b66dbb2cc4e235ea8209988e47495481e1acd5dc9b695c226c4c572a0b841\": container with ID starting with 729b66dbb2cc4e235ea8209988e47495481e1acd5dc9b695c226c4c572a0b841 not found: ID does not exist" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.166030 4728 scope.go:117] "RemoveContainer" containerID="3661d4a688940abf6aa1e25bffdada007e91fa298063659adea831f4432ba21f" Dec 05 11:20:51 crc kubenswrapper[4728]: E1205 11:20:51.166355 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3661d4a688940abf6aa1e25bffdada007e91fa298063659adea831f4432ba21f\": container with ID starting with 3661d4a688940abf6aa1e25bffdada007e91fa298063659adea831f4432ba21f not found: ID does not exist" containerID="3661d4a688940abf6aa1e25bffdada007e91fa298063659adea831f4432ba21f" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.166383 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3661d4a688940abf6aa1e25bffdada007e91fa298063659adea831f4432ba21f"} err="failed to get container status \"3661d4a688940abf6aa1e25bffdada007e91fa298063659adea831f4432ba21f\": rpc error: code = NotFound desc = could not find container \"3661d4a688940abf6aa1e25bffdada007e91fa298063659adea831f4432ba21f\": container with ID starting with 3661d4a688940abf6aa1e25bffdada007e91fa298063659adea831f4432ba21f not found: ID does not exist" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.166402 4728 scope.go:117] "RemoveContainer" containerID="b9be8133cc8e8e518cd8a6a3ace93814e8d565d8bea2b81433ba07281ade2f3f" Dec 05 11:20:51 crc kubenswrapper[4728]: E1205 11:20:51.166614 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b9be8133cc8e8e518cd8a6a3ace93814e8d565d8bea2b81433ba07281ade2f3f\": container with ID starting with b9be8133cc8e8e518cd8a6a3ace93814e8d565d8bea2b81433ba07281ade2f3f not found: ID does not exist" containerID="b9be8133cc8e8e518cd8a6a3ace93814e8d565d8bea2b81433ba07281ade2f3f" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.166644 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b9be8133cc8e8e518cd8a6a3ace93814e8d565d8bea2b81433ba07281ade2f3f"} err="failed to get container status \"b9be8133cc8e8e518cd8a6a3ace93814e8d565d8bea2b81433ba07281ade2f3f\": rpc error: code = NotFound desc = could not find container \"b9be8133cc8e8e518cd8a6a3ace93814e8d565d8bea2b81433ba07281ade2f3f\": container with ID starting with 
b9be8133cc8e8e518cd8a6a3ace93814e8d565d8bea2b81433ba07281ade2f3f not found: ID does not exist" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.166659 4728 scope.go:117] "RemoveContainer" containerID="4aacb2f8e3ec748166a587a5c2c7c026382073d9e3b8e20caf4dc444e9cc70c6" Dec 05 11:20:51 crc kubenswrapper[4728]: E1205 11:20:51.167128 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4aacb2f8e3ec748166a587a5c2c7c026382073d9e3b8e20caf4dc444e9cc70c6\": container with ID starting with 4aacb2f8e3ec748166a587a5c2c7c026382073d9e3b8e20caf4dc444e9cc70c6 not found: ID does not exist" containerID="4aacb2f8e3ec748166a587a5c2c7c026382073d9e3b8e20caf4dc444e9cc70c6" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.167178 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4aacb2f8e3ec748166a587a5c2c7c026382073d9e3b8e20caf4dc444e9cc70c6"} err="failed to get container status \"4aacb2f8e3ec748166a587a5c2c7c026382073d9e3b8e20caf4dc444e9cc70c6\": rpc error: code = NotFound desc = could not find container \"4aacb2f8e3ec748166a587a5c2c7c026382073d9e3b8e20caf4dc444e9cc70c6\": container with ID starting with 4aacb2f8e3ec748166a587a5c2c7c026382073d9e3b8e20caf4dc444e9cc70c6 not found: ID does not exist" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.167212 4728 scope.go:117] "RemoveContainer" containerID="19041ddafe3e2fe2bb95577c9bc5e372ba443790469deb4f36634d3690a61b0b" Dec 05 11:20:51 crc kubenswrapper[4728]: E1205 11:20:51.167583 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"19041ddafe3e2fe2bb95577c9bc5e372ba443790469deb4f36634d3690a61b0b\": container with ID starting with 19041ddafe3e2fe2bb95577c9bc5e372ba443790469deb4f36634d3690a61b0b not found: ID does not exist" containerID="19041ddafe3e2fe2bb95577c9bc5e372ba443790469deb4f36634d3690a61b0b" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.167607 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19041ddafe3e2fe2bb95577c9bc5e372ba443790469deb4f36634d3690a61b0b"} err="failed to get container status \"19041ddafe3e2fe2bb95577c9bc5e372ba443790469deb4f36634d3690a61b0b\": rpc error: code = NotFound desc = could not find container \"19041ddafe3e2fe2bb95577c9bc5e372ba443790469deb4f36634d3690a61b0b\": container with ID starting with 19041ddafe3e2fe2bb95577c9bc5e372ba443790469deb4f36634d3690a61b0b not found: ID does not exist" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.167623 4728 scope.go:117] "RemoveContainer" containerID="d82bd221438311b8ba34cfc26e43e590743b257ad91407befb8f4d2e17f7ba55" Dec 05 11:20:51 crc kubenswrapper[4728]: E1205 11:20:51.167869 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d82bd221438311b8ba34cfc26e43e590743b257ad91407befb8f4d2e17f7ba55\": container with ID starting with d82bd221438311b8ba34cfc26e43e590743b257ad91407befb8f4d2e17f7ba55 not found: ID does not exist" containerID="d82bd221438311b8ba34cfc26e43e590743b257ad91407befb8f4d2e17f7ba55" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.167893 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d82bd221438311b8ba34cfc26e43e590743b257ad91407befb8f4d2e17f7ba55"} err="failed to get container status \"d82bd221438311b8ba34cfc26e43e590743b257ad91407befb8f4d2e17f7ba55\": rpc 
error: code = NotFound desc = could not find container \"d82bd221438311b8ba34cfc26e43e590743b257ad91407befb8f4d2e17f7ba55\": container with ID starting with d82bd221438311b8ba34cfc26e43e590743b257ad91407befb8f4d2e17f7ba55 not found: ID does not exist" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.167962 4728 scope.go:117] "RemoveContainer" containerID="62ccfb6861d12cec3082bfe61fc1976dc0889398539797aa62979b632e74c9ff" Dec 05 11:20:51 crc kubenswrapper[4728]: E1205 11:20:51.168197 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"62ccfb6861d12cec3082bfe61fc1976dc0889398539797aa62979b632e74c9ff\": container with ID starting with 62ccfb6861d12cec3082bfe61fc1976dc0889398539797aa62979b632e74c9ff not found: ID does not exist" containerID="62ccfb6861d12cec3082bfe61fc1976dc0889398539797aa62979b632e74c9ff" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.168220 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62ccfb6861d12cec3082bfe61fc1976dc0889398539797aa62979b632e74c9ff"} err="failed to get container status \"62ccfb6861d12cec3082bfe61fc1976dc0889398539797aa62979b632e74c9ff\": rpc error: code = NotFound desc = could not find container \"62ccfb6861d12cec3082bfe61fc1976dc0889398539797aa62979b632e74c9ff\": container with ID starting with 62ccfb6861d12cec3082bfe61fc1976dc0889398539797aa62979b632e74c9ff not found: ID does not exist" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.168235 4728 scope.go:117] "RemoveContainer" containerID="6d488533307095d5c0efc56c8f32bac388226d2d5763c5c31db03c84585899f2" Dec 05 11:20:51 crc kubenswrapper[4728]: E1205 11:20:51.168408 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6d488533307095d5c0efc56c8f32bac388226d2d5763c5c31db03c84585899f2\": container with ID starting with 6d488533307095d5c0efc56c8f32bac388226d2d5763c5c31db03c84585899f2 not found: ID does not exist" containerID="6d488533307095d5c0efc56c8f32bac388226d2d5763c5c31db03c84585899f2" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.168426 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d488533307095d5c0efc56c8f32bac388226d2d5763c5c31db03c84585899f2"} err="failed to get container status \"6d488533307095d5c0efc56c8f32bac388226d2d5763c5c31db03c84585899f2\": rpc error: code = NotFound desc = could not find container \"6d488533307095d5c0efc56c8f32bac388226d2d5763c5c31db03c84585899f2\": container with ID starting with 6d488533307095d5c0efc56c8f32bac388226d2d5763c5c31db03c84585899f2 not found: ID does not exist" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.168439 4728 scope.go:117] "RemoveContainer" containerID="02bc0b9bc6ed08d2162af49b31f5e66ef70f78723e4016f6be5905d1227201b7" Dec 05 11:20:51 crc kubenswrapper[4728]: E1205 11:20:51.168640 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"02bc0b9bc6ed08d2162af49b31f5e66ef70f78723e4016f6be5905d1227201b7\": container with ID starting with 02bc0b9bc6ed08d2162af49b31f5e66ef70f78723e4016f6be5905d1227201b7 not found: ID does not exist" containerID="02bc0b9bc6ed08d2162af49b31f5e66ef70f78723e4016f6be5905d1227201b7" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.168667 4728 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"02bc0b9bc6ed08d2162af49b31f5e66ef70f78723e4016f6be5905d1227201b7"} err="failed to get container status \"02bc0b9bc6ed08d2162af49b31f5e66ef70f78723e4016f6be5905d1227201b7\": rpc error: code = NotFound desc = could not find container \"02bc0b9bc6ed08d2162af49b31f5e66ef70f78723e4016f6be5905d1227201b7\": container with ID starting with 02bc0b9bc6ed08d2162af49b31f5e66ef70f78723e4016f6be5905d1227201b7 not found: ID does not exist" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.168685 4728 scope.go:117] "RemoveContainer" containerID="d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079" Dec 05 11:20:51 crc kubenswrapper[4728]: E1205 11:20:51.168966 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\": container with ID starting with d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079 not found: ID does not exist" containerID="d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.168992 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079"} err="failed to get container status \"d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\": rpc error: code = NotFound desc = could not find container \"d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\": container with ID starting with d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079 not found: ID does not exist" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.169010 4728 scope.go:117] "RemoveContainer" containerID="729b66dbb2cc4e235ea8209988e47495481e1acd5dc9b695c226c4c572a0b841" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.169235 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"729b66dbb2cc4e235ea8209988e47495481e1acd5dc9b695c226c4c572a0b841"} err="failed to get container status \"729b66dbb2cc4e235ea8209988e47495481e1acd5dc9b695c226c4c572a0b841\": rpc error: code = NotFound desc = could not find container \"729b66dbb2cc4e235ea8209988e47495481e1acd5dc9b695c226c4c572a0b841\": container with ID starting with 729b66dbb2cc4e235ea8209988e47495481e1acd5dc9b695c226c4c572a0b841 not found: ID does not exist" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.169254 4728 scope.go:117] "RemoveContainer" containerID="3661d4a688940abf6aa1e25bffdada007e91fa298063659adea831f4432ba21f" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.169493 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3661d4a688940abf6aa1e25bffdada007e91fa298063659adea831f4432ba21f"} err="failed to get container status \"3661d4a688940abf6aa1e25bffdada007e91fa298063659adea831f4432ba21f\": rpc error: code = NotFound desc = could not find container \"3661d4a688940abf6aa1e25bffdada007e91fa298063659adea831f4432ba21f\": container with ID starting with 3661d4a688940abf6aa1e25bffdada007e91fa298063659adea831f4432ba21f not found: ID does not exist" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.169519 4728 scope.go:117] "RemoveContainer" containerID="b9be8133cc8e8e518cd8a6a3ace93814e8d565d8bea2b81433ba07281ade2f3f" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.169725 4728 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"b9be8133cc8e8e518cd8a6a3ace93814e8d565d8bea2b81433ba07281ade2f3f"} err="failed to get container status \"b9be8133cc8e8e518cd8a6a3ace93814e8d565d8bea2b81433ba07281ade2f3f\": rpc error: code = NotFound desc = could not find container \"b9be8133cc8e8e518cd8a6a3ace93814e8d565d8bea2b81433ba07281ade2f3f\": container with ID starting with b9be8133cc8e8e518cd8a6a3ace93814e8d565d8bea2b81433ba07281ade2f3f not found: ID does not exist" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.169742 4728 scope.go:117] "RemoveContainer" containerID="4aacb2f8e3ec748166a587a5c2c7c026382073d9e3b8e20caf4dc444e9cc70c6" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.169971 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4aacb2f8e3ec748166a587a5c2c7c026382073d9e3b8e20caf4dc444e9cc70c6"} err="failed to get container status \"4aacb2f8e3ec748166a587a5c2c7c026382073d9e3b8e20caf4dc444e9cc70c6\": rpc error: code = NotFound desc = could not find container \"4aacb2f8e3ec748166a587a5c2c7c026382073d9e3b8e20caf4dc444e9cc70c6\": container with ID starting with 4aacb2f8e3ec748166a587a5c2c7c026382073d9e3b8e20caf4dc444e9cc70c6 not found: ID does not exist" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.169997 4728 scope.go:117] "RemoveContainer" containerID="19041ddafe3e2fe2bb95577c9bc5e372ba443790469deb4f36634d3690a61b0b" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.170201 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19041ddafe3e2fe2bb95577c9bc5e372ba443790469deb4f36634d3690a61b0b"} err="failed to get container status \"19041ddafe3e2fe2bb95577c9bc5e372ba443790469deb4f36634d3690a61b0b\": rpc error: code = NotFound desc = could not find container \"19041ddafe3e2fe2bb95577c9bc5e372ba443790469deb4f36634d3690a61b0b\": container with ID starting with 19041ddafe3e2fe2bb95577c9bc5e372ba443790469deb4f36634d3690a61b0b not found: ID does not exist" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.170216 4728 scope.go:117] "RemoveContainer" containerID="d82bd221438311b8ba34cfc26e43e590743b257ad91407befb8f4d2e17f7ba55" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.170458 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d82bd221438311b8ba34cfc26e43e590743b257ad91407befb8f4d2e17f7ba55"} err="failed to get container status \"d82bd221438311b8ba34cfc26e43e590743b257ad91407befb8f4d2e17f7ba55\": rpc error: code = NotFound desc = could not find container \"d82bd221438311b8ba34cfc26e43e590743b257ad91407befb8f4d2e17f7ba55\": container with ID starting with d82bd221438311b8ba34cfc26e43e590743b257ad91407befb8f4d2e17f7ba55 not found: ID does not exist" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.170476 4728 scope.go:117] "RemoveContainer" containerID="62ccfb6861d12cec3082bfe61fc1976dc0889398539797aa62979b632e74c9ff" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.170775 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62ccfb6861d12cec3082bfe61fc1976dc0889398539797aa62979b632e74c9ff"} err="failed to get container status \"62ccfb6861d12cec3082bfe61fc1976dc0889398539797aa62979b632e74c9ff\": rpc error: code = NotFound desc = could not find container \"62ccfb6861d12cec3082bfe61fc1976dc0889398539797aa62979b632e74c9ff\": container with ID starting with 62ccfb6861d12cec3082bfe61fc1976dc0889398539797aa62979b632e74c9ff not found: ID does not exist" Dec 
05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.170837 4728 scope.go:117] "RemoveContainer" containerID="6d488533307095d5c0efc56c8f32bac388226d2d5763c5c31db03c84585899f2" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.171043 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d488533307095d5c0efc56c8f32bac388226d2d5763c5c31db03c84585899f2"} err="failed to get container status \"6d488533307095d5c0efc56c8f32bac388226d2d5763c5c31db03c84585899f2\": rpc error: code = NotFound desc = could not find container \"6d488533307095d5c0efc56c8f32bac388226d2d5763c5c31db03c84585899f2\": container with ID starting with 6d488533307095d5c0efc56c8f32bac388226d2d5763c5c31db03c84585899f2 not found: ID does not exist" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.171062 4728 scope.go:117] "RemoveContainer" containerID="02bc0b9bc6ed08d2162af49b31f5e66ef70f78723e4016f6be5905d1227201b7" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.171224 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"02bc0b9bc6ed08d2162af49b31f5e66ef70f78723e4016f6be5905d1227201b7"} err="failed to get container status \"02bc0b9bc6ed08d2162af49b31f5e66ef70f78723e4016f6be5905d1227201b7\": rpc error: code = NotFound desc = could not find container \"02bc0b9bc6ed08d2162af49b31f5e66ef70f78723e4016f6be5905d1227201b7\": container with ID starting with 02bc0b9bc6ed08d2162af49b31f5e66ef70f78723e4016f6be5905d1227201b7 not found: ID does not exist" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.171241 4728 scope.go:117] "RemoveContainer" containerID="d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.171424 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079"} err="failed to get container status \"d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\": rpc error: code = NotFound desc = could not find container \"d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\": container with ID starting with d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079 not found: ID does not exist" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.171446 4728 scope.go:117] "RemoveContainer" containerID="729b66dbb2cc4e235ea8209988e47495481e1acd5dc9b695c226c4c572a0b841" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.171651 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"729b66dbb2cc4e235ea8209988e47495481e1acd5dc9b695c226c4c572a0b841"} err="failed to get container status \"729b66dbb2cc4e235ea8209988e47495481e1acd5dc9b695c226c4c572a0b841\": rpc error: code = NotFound desc = could not find container \"729b66dbb2cc4e235ea8209988e47495481e1acd5dc9b695c226c4c572a0b841\": container with ID starting with 729b66dbb2cc4e235ea8209988e47495481e1acd5dc9b695c226c4c572a0b841 not found: ID does not exist" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.171669 4728 scope.go:117] "RemoveContainer" containerID="3661d4a688940abf6aa1e25bffdada007e91fa298063659adea831f4432ba21f" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.171917 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3661d4a688940abf6aa1e25bffdada007e91fa298063659adea831f4432ba21f"} err="failed to get container status 
\"3661d4a688940abf6aa1e25bffdada007e91fa298063659adea831f4432ba21f\": rpc error: code = NotFound desc = could not find container \"3661d4a688940abf6aa1e25bffdada007e91fa298063659adea831f4432ba21f\": container with ID starting with 3661d4a688940abf6aa1e25bffdada007e91fa298063659adea831f4432ba21f not found: ID does not exist" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.171937 4728 scope.go:117] "RemoveContainer" containerID="b9be8133cc8e8e518cd8a6a3ace93814e8d565d8bea2b81433ba07281ade2f3f" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.172141 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b9be8133cc8e8e518cd8a6a3ace93814e8d565d8bea2b81433ba07281ade2f3f"} err="failed to get container status \"b9be8133cc8e8e518cd8a6a3ace93814e8d565d8bea2b81433ba07281ade2f3f\": rpc error: code = NotFound desc = could not find container \"b9be8133cc8e8e518cd8a6a3ace93814e8d565d8bea2b81433ba07281ade2f3f\": container with ID starting with b9be8133cc8e8e518cd8a6a3ace93814e8d565d8bea2b81433ba07281ade2f3f not found: ID does not exist" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.172166 4728 scope.go:117] "RemoveContainer" containerID="4aacb2f8e3ec748166a587a5c2c7c026382073d9e3b8e20caf4dc444e9cc70c6" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.172371 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4aacb2f8e3ec748166a587a5c2c7c026382073d9e3b8e20caf4dc444e9cc70c6"} err="failed to get container status \"4aacb2f8e3ec748166a587a5c2c7c026382073d9e3b8e20caf4dc444e9cc70c6\": rpc error: code = NotFound desc = could not find container \"4aacb2f8e3ec748166a587a5c2c7c026382073d9e3b8e20caf4dc444e9cc70c6\": container with ID starting with 4aacb2f8e3ec748166a587a5c2c7c026382073d9e3b8e20caf4dc444e9cc70c6 not found: ID does not exist" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.172389 4728 scope.go:117] "RemoveContainer" containerID="19041ddafe3e2fe2bb95577c9bc5e372ba443790469deb4f36634d3690a61b0b" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.172566 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19041ddafe3e2fe2bb95577c9bc5e372ba443790469deb4f36634d3690a61b0b"} err="failed to get container status \"19041ddafe3e2fe2bb95577c9bc5e372ba443790469deb4f36634d3690a61b0b\": rpc error: code = NotFound desc = could not find container \"19041ddafe3e2fe2bb95577c9bc5e372ba443790469deb4f36634d3690a61b0b\": container with ID starting with 19041ddafe3e2fe2bb95577c9bc5e372ba443790469deb4f36634d3690a61b0b not found: ID does not exist" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.172589 4728 scope.go:117] "RemoveContainer" containerID="d82bd221438311b8ba34cfc26e43e590743b257ad91407befb8f4d2e17f7ba55" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.174664 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d82bd221438311b8ba34cfc26e43e590743b257ad91407befb8f4d2e17f7ba55"} err="failed to get container status \"d82bd221438311b8ba34cfc26e43e590743b257ad91407befb8f4d2e17f7ba55\": rpc error: code = NotFound desc = could not find container \"d82bd221438311b8ba34cfc26e43e590743b257ad91407befb8f4d2e17f7ba55\": container with ID starting with d82bd221438311b8ba34cfc26e43e590743b257ad91407befb8f4d2e17f7ba55 not found: ID does not exist" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.174684 4728 scope.go:117] "RemoveContainer" 
containerID="62ccfb6861d12cec3082bfe61fc1976dc0889398539797aa62979b632e74c9ff" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.174961 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62ccfb6861d12cec3082bfe61fc1976dc0889398539797aa62979b632e74c9ff"} err="failed to get container status \"62ccfb6861d12cec3082bfe61fc1976dc0889398539797aa62979b632e74c9ff\": rpc error: code = NotFound desc = could not find container \"62ccfb6861d12cec3082bfe61fc1976dc0889398539797aa62979b632e74c9ff\": container with ID starting with 62ccfb6861d12cec3082bfe61fc1976dc0889398539797aa62979b632e74c9ff not found: ID does not exist" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.174989 4728 scope.go:117] "RemoveContainer" containerID="6d488533307095d5c0efc56c8f32bac388226d2d5763c5c31db03c84585899f2" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.175272 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d488533307095d5c0efc56c8f32bac388226d2d5763c5c31db03c84585899f2"} err="failed to get container status \"6d488533307095d5c0efc56c8f32bac388226d2d5763c5c31db03c84585899f2\": rpc error: code = NotFound desc = could not find container \"6d488533307095d5c0efc56c8f32bac388226d2d5763c5c31db03c84585899f2\": container with ID starting with 6d488533307095d5c0efc56c8f32bac388226d2d5763c5c31db03c84585899f2 not found: ID does not exist" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.175297 4728 scope.go:117] "RemoveContainer" containerID="02bc0b9bc6ed08d2162af49b31f5e66ef70f78723e4016f6be5905d1227201b7" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.175507 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"02bc0b9bc6ed08d2162af49b31f5e66ef70f78723e4016f6be5905d1227201b7"} err="failed to get container status \"02bc0b9bc6ed08d2162af49b31f5e66ef70f78723e4016f6be5905d1227201b7\": rpc error: code = NotFound desc = could not find container \"02bc0b9bc6ed08d2162af49b31f5e66ef70f78723e4016f6be5905d1227201b7\": container with ID starting with 02bc0b9bc6ed08d2162af49b31f5e66ef70f78723e4016f6be5905d1227201b7 not found: ID does not exist" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.175526 4728 scope.go:117] "RemoveContainer" containerID="d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.175724 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079"} err="failed to get container status \"d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\": rpc error: code = NotFound desc = could not find container \"d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\": container with ID starting with d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079 not found: ID does not exist" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.175745 4728 scope.go:117] "RemoveContainer" containerID="729b66dbb2cc4e235ea8209988e47495481e1acd5dc9b695c226c4c572a0b841" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.176180 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"729b66dbb2cc4e235ea8209988e47495481e1acd5dc9b695c226c4c572a0b841"} err="failed to get container status \"729b66dbb2cc4e235ea8209988e47495481e1acd5dc9b695c226c4c572a0b841\": rpc error: code = NotFound desc = could not find 
container \"729b66dbb2cc4e235ea8209988e47495481e1acd5dc9b695c226c4c572a0b841\": container with ID starting with 729b66dbb2cc4e235ea8209988e47495481e1acd5dc9b695c226c4c572a0b841 not found: ID does not exist" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.176199 4728 scope.go:117] "RemoveContainer" containerID="3661d4a688940abf6aa1e25bffdada007e91fa298063659adea831f4432ba21f" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.176432 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3661d4a688940abf6aa1e25bffdada007e91fa298063659adea831f4432ba21f"} err="failed to get container status \"3661d4a688940abf6aa1e25bffdada007e91fa298063659adea831f4432ba21f\": rpc error: code = NotFound desc = could not find container \"3661d4a688940abf6aa1e25bffdada007e91fa298063659adea831f4432ba21f\": container with ID starting with 3661d4a688940abf6aa1e25bffdada007e91fa298063659adea831f4432ba21f not found: ID does not exist" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.176451 4728 scope.go:117] "RemoveContainer" containerID="b9be8133cc8e8e518cd8a6a3ace93814e8d565d8bea2b81433ba07281ade2f3f" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.176689 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b9be8133cc8e8e518cd8a6a3ace93814e8d565d8bea2b81433ba07281ade2f3f"} err="failed to get container status \"b9be8133cc8e8e518cd8a6a3ace93814e8d565d8bea2b81433ba07281ade2f3f\": rpc error: code = NotFound desc = could not find container \"b9be8133cc8e8e518cd8a6a3ace93814e8d565d8bea2b81433ba07281ade2f3f\": container with ID starting with b9be8133cc8e8e518cd8a6a3ace93814e8d565d8bea2b81433ba07281ade2f3f not found: ID does not exist" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.176709 4728 scope.go:117] "RemoveContainer" containerID="4aacb2f8e3ec748166a587a5c2c7c026382073d9e3b8e20caf4dc444e9cc70c6" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.176941 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4aacb2f8e3ec748166a587a5c2c7c026382073d9e3b8e20caf4dc444e9cc70c6"} err="failed to get container status \"4aacb2f8e3ec748166a587a5c2c7c026382073d9e3b8e20caf4dc444e9cc70c6\": rpc error: code = NotFound desc = could not find container \"4aacb2f8e3ec748166a587a5c2c7c026382073d9e3b8e20caf4dc444e9cc70c6\": container with ID starting with 4aacb2f8e3ec748166a587a5c2c7c026382073d9e3b8e20caf4dc444e9cc70c6 not found: ID does not exist" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.176960 4728 scope.go:117] "RemoveContainer" containerID="19041ddafe3e2fe2bb95577c9bc5e372ba443790469deb4f36634d3690a61b0b" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.177177 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19041ddafe3e2fe2bb95577c9bc5e372ba443790469deb4f36634d3690a61b0b"} err="failed to get container status \"19041ddafe3e2fe2bb95577c9bc5e372ba443790469deb4f36634d3690a61b0b\": rpc error: code = NotFound desc = could not find container \"19041ddafe3e2fe2bb95577c9bc5e372ba443790469deb4f36634d3690a61b0b\": container with ID starting with 19041ddafe3e2fe2bb95577c9bc5e372ba443790469deb4f36634d3690a61b0b not found: ID does not exist" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.177204 4728 scope.go:117] "RemoveContainer" containerID="d82bd221438311b8ba34cfc26e43e590743b257ad91407befb8f4d2e17f7ba55" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.177359 4728 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d82bd221438311b8ba34cfc26e43e590743b257ad91407befb8f4d2e17f7ba55"} err="failed to get container status \"d82bd221438311b8ba34cfc26e43e590743b257ad91407befb8f4d2e17f7ba55\": rpc error: code = NotFound desc = could not find container \"d82bd221438311b8ba34cfc26e43e590743b257ad91407befb8f4d2e17f7ba55\": container with ID starting with d82bd221438311b8ba34cfc26e43e590743b257ad91407befb8f4d2e17f7ba55 not found: ID does not exist" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.177374 4728 scope.go:117] "RemoveContainer" containerID="62ccfb6861d12cec3082bfe61fc1976dc0889398539797aa62979b632e74c9ff" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.177555 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62ccfb6861d12cec3082bfe61fc1976dc0889398539797aa62979b632e74c9ff"} err="failed to get container status \"62ccfb6861d12cec3082bfe61fc1976dc0889398539797aa62979b632e74c9ff\": rpc error: code = NotFound desc = could not find container \"62ccfb6861d12cec3082bfe61fc1976dc0889398539797aa62979b632e74c9ff\": container with ID starting with 62ccfb6861d12cec3082bfe61fc1976dc0889398539797aa62979b632e74c9ff not found: ID does not exist" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.177573 4728 scope.go:117] "RemoveContainer" containerID="6d488533307095d5c0efc56c8f32bac388226d2d5763c5c31db03c84585899f2" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.177966 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d488533307095d5c0efc56c8f32bac388226d2d5763c5c31db03c84585899f2"} err="failed to get container status \"6d488533307095d5c0efc56c8f32bac388226d2d5763c5c31db03c84585899f2\": rpc error: code = NotFound desc = could not find container \"6d488533307095d5c0efc56c8f32bac388226d2d5763c5c31db03c84585899f2\": container with ID starting with 6d488533307095d5c0efc56c8f32bac388226d2d5763c5c31db03c84585899f2 not found: ID does not exist" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.177986 4728 scope.go:117] "RemoveContainer" containerID="02bc0b9bc6ed08d2162af49b31f5e66ef70f78723e4016f6be5905d1227201b7" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.178244 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"02bc0b9bc6ed08d2162af49b31f5e66ef70f78723e4016f6be5905d1227201b7"} err="failed to get container status \"02bc0b9bc6ed08d2162af49b31f5e66ef70f78723e4016f6be5905d1227201b7\": rpc error: code = NotFound desc = could not find container \"02bc0b9bc6ed08d2162af49b31f5e66ef70f78723e4016f6be5905d1227201b7\": container with ID starting with 02bc0b9bc6ed08d2162af49b31f5e66ef70f78723e4016f6be5905d1227201b7 not found: ID does not exist" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.178269 4728 scope.go:117] "RemoveContainer" containerID="d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.178553 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079"} err="failed to get container status \"d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\": rpc error: code = NotFound desc = could not find container \"d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079\": container with ID starting with 
d78663f198d123fd0eac3fbfa7164f16810156b358f8284ae2ac87bb579e5079 not found: ID does not exist" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.178576 4728 scope.go:117] "RemoveContainer" containerID="729b66dbb2cc4e235ea8209988e47495481e1acd5dc9b695c226c4c572a0b841" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.178761 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"729b66dbb2cc4e235ea8209988e47495481e1acd5dc9b695c226c4c572a0b841"} err="failed to get container status \"729b66dbb2cc4e235ea8209988e47495481e1acd5dc9b695c226c4c572a0b841\": rpc error: code = NotFound desc = could not find container \"729b66dbb2cc4e235ea8209988e47495481e1acd5dc9b695c226c4c572a0b841\": container with ID starting with 729b66dbb2cc4e235ea8209988e47495481e1acd5dc9b695c226c4c572a0b841 not found: ID does not exist" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.936881 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gf8np_f292da29-a632-47aa-8bcc-2d999eaa6c11/kube-multus/2.log" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.937459 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gf8np_f292da29-a632-47aa-8bcc-2d999eaa6c11/kube-multus/1.log" Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.937576 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gf8np" event={"ID":"f292da29-a632-47aa-8bcc-2d999eaa6c11","Type":"ContainerStarted","Data":"9e7b83207b4fce2c53b9af0b88f38eeb7c22f0327bfd53be7d56b407cfe75f19"} Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.942303 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" event={"ID":"8fe18459-be14-4e98-a84d-1c4d6a158819","Type":"ContainerStarted","Data":"5161fb9c1af25c9ba60f560327d3307fc5ef7719d5af6b85fc44cebd873a3e25"} Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.942344 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" event={"ID":"8fe18459-be14-4e98-a84d-1c4d6a158819","Type":"ContainerStarted","Data":"f82d25d2004b8293c13fc57d75654bb0a01550952f8630703bab7215fc99a61c"} Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.942357 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" event={"ID":"8fe18459-be14-4e98-a84d-1c4d6a158819","Type":"ContainerStarted","Data":"10099f7fce8209ede7242044228bb1ee1746673a1f71d6401d13968f0b9cf016"} Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.942365 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" event={"ID":"8fe18459-be14-4e98-a84d-1c4d6a158819","Type":"ContainerStarted","Data":"a98842e5a1afa5501c895f339a7e51d4f0c95d6046fd4a0a69508254a9f5e8c5"} Dec 05 11:20:51 crc kubenswrapper[4728]: I1205 11:20:51.942373 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" event={"ID":"8fe18459-be14-4e98-a84d-1c4d6a158819","Type":"ContainerStarted","Data":"dc78f52670a185025d6a7a2674bc7ec52394e2615adb9738d731f8fde8d295f9"} Dec 05 11:20:52 crc kubenswrapper[4728]: I1205 11:20:52.359232 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5" path="/var/lib/kubelet/pods/1c5fa466-a6b6-4c17-b4b7-aff5b6311cc5/volumes" Dec 05 11:20:52 crc kubenswrapper[4728]: I1205 11:20:52.957067 4728 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" event={"ID":"8fe18459-be14-4e98-a84d-1c4d6a158819","Type":"ContainerStarted","Data":"3f0de001e8ea216c589f6b36cf627a1814ecca163cc0b1b1e3fb58ebf91a1c84"} Dec 05 11:20:54 crc kubenswrapper[4728]: I1205 11:20:54.973261 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" event={"ID":"8fe18459-be14-4e98-a84d-1c4d6a158819","Type":"ContainerStarted","Data":"266cee463e68abc8efa9e0cfd1d39c03e0519f10d35f12b931352d79a2e54e95"} Dec 05 11:20:55 crc kubenswrapper[4728]: I1205 11:20:55.702298 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 11:20:55 crc kubenswrapper[4728]: I1205 11:20:55.702363 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 11:20:56 crc kubenswrapper[4728]: I1205 11:20:56.985931 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" event={"ID":"8fe18459-be14-4e98-a84d-1c4d6a158819","Type":"ContainerStarted","Data":"961c9a492e72edd0977e54c74f7a733115db37a1e3049260e860becbe2600d26"} Dec 05 11:20:56 crc kubenswrapper[4728]: I1205 11:20:56.987220 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" Dec 05 11:20:56 crc kubenswrapper[4728]: I1205 11:20:56.987241 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" Dec 05 11:20:56 crc kubenswrapper[4728]: I1205 11:20:56.987249 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" Dec 05 11:20:57 crc kubenswrapper[4728]: I1205 11:20:57.018663 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" Dec 05 11:20:57 crc kubenswrapper[4728]: I1205 11:20:57.023136 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" Dec 05 11:20:57 crc kubenswrapper[4728]: I1205 11:20:57.047119 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" podStartSLOduration=7.04710276 podStartE2EDuration="7.04710276s" podCreationTimestamp="2025-12-05 11:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:20:57.018104814 +0000 UTC m=+791.160227517" watchObservedRunningTime="2025-12-05 11:20:57.04710276 +0000 UTC m=+791.189225453" Dec 05 11:21:17 crc kubenswrapper[4728]: I1205 11:21:17.938598 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceph"] Dec 05 11:21:17 crc kubenswrapper[4728]: I1205 11:21:17.940438 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph" Dec 05 11:21:17 crc kubenswrapper[4728]: I1205 11:21:17.943408 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Dec 05 11:21:17 crc kubenswrapper[4728]: I1205 11:21:17.943601 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-r64bp" Dec 05 11:21:17 crc kubenswrapper[4728]: I1205 11:21:17.943611 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Dec 05 11:21:18 crc kubenswrapper[4728]: I1205 11:21:18.007499 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data\" (UniqueName: \"kubernetes.io/empty-dir/ec650e34-b972-46a5-886c-ba25b07fca9c-data\") pod \"ceph\" (UID: \"ec650e34-b972-46a5-886c-ba25b07fca9c\") " pod="openstack/ceph" Dec 05 11:21:18 crc kubenswrapper[4728]: I1205 11:21:18.007557 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9nlb9\" (UniqueName: \"kubernetes.io/projected/ec650e34-b972-46a5-886c-ba25b07fca9c-kube-api-access-9nlb9\") pod \"ceph\" (UID: \"ec650e34-b972-46a5-886c-ba25b07fca9c\") " pod="openstack/ceph" Dec 05 11:21:18 crc kubenswrapper[4728]: I1205 11:21:18.007604 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/empty-dir/ec650e34-b972-46a5-886c-ba25b07fca9c-run\") pod \"ceph\" (UID: \"ec650e34-b972-46a5-886c-ba25b07fca9c\") " pod="openstack/ceph" Dec 05 11:21:18 crc kubenswrapper[4728]: I1205 11:21:18.007641 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log\" (UniqueName: \"kubernetes.io/empty-dir/ec650e34-b972-46a5-886c-ba25b07fca9c-log\") pod \"ceph\" (UID: \"ec650e34-b972-46a5-886c-ba25b07fca9c\") " pod="openstack/ceph" Dec 05 11:21:18 crc kubenswrapper[4728]: I1205 11:21:18.109212 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log\" (UniqueName: \"kubernetes.io/empty-dir/ec650e34-b972-46a5-886c-ba25b07fca9c-log\") pod \"ceph\" (UID: \"ec650e34-b972-46a5-886c-ba25b07fca9c\") " pod="openstack/ceph" Dec 05 11:21:18 crc kubenswrapper[4728]: I1205 11:21:18.109432 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data\" (UniqueName: \"kubernetes.io/empty-dir/ec650e34-b972-46a5-886c-ba25b07fca9c-data\") pod \"ceph\" (UID: \"ec650e34-b972-46a5-886c-ba25b07fca9c\") " pod="openstack/ceph" Dec 05 11:21:18 crc kubenswrapper[4728]: I1205 11:21:18.109501 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9nlb9\" (UniqueName: \"kubernetes.io/projected/ec650e34-b972-46a5-886c-ba25b07fca9c-kube-api-access-9nlb9\") pod \"ceph\" (UID: \"ec650e34-b972-46a5-886c-ba25b07fca9c\") " pod="openstack/ceph" Dec 05 11:21:18 crc kubenswrapper[4728]: I1205 11:21:18.109575 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/empty-dir/ec650e34-b972-46a5-886c-ba25b07fca9c-run\") pod \"ceph\" (UID: \"ec650e34-b972-46a5-886c-ba25b07fca9c\") " pod="openstack/ceph" Dec 05 11:21:18 crc kubenswrapper[4728]: I1205 11:21:18.110171 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log\" (UniqueName: \"kubernetes.io/empty-dir/ec650e34-b972-46a5-886c-ba25b07fca9c-log\") pod \"ceph\" (UID: \"ec650e34-b972-46a5-886c-ba25b07fca9c\") " 
pod="openstack/ceph" Dec 05 11:21:18 crc kubenswrapper[4728]: I1205 11:21:18.110213 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data\" (UniqueName: \"kubernetes.io/empty-dir/ec650e34-b972-46a5-886c-ba25b07fca9c-data\") pod \"ceph\" (UID: \"ec650e34-b972-46a5-886c-ba25b07fca9c\") " pod="openstack/ceph" Dec 05 11:21:18 crc kubenswrapper[4728]: I1205 11:21:18.110380 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/empty-dir/ec650e34-b972-46a5-886c-ba25b07fca9c-run\") pod \"ceph\" (UID: \"ec650e34-b972-46a5-886c-ba25b07fca9c\") " pod="openstack/ceph" Dec 05 11:21:18 crc kubenswrapper[4728]: I1205 11:21:18.143921 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9nlb9\" (UniqueName: \"kubernetes.io/projected/ec650e34-b972-46a5-886c-ba25b07fca9c-kube-api-access-9nlb9\") pod \"ceph\" (UID: \"ec650e34-b972-46a5-886c-ba25b07fca9c\") " pod="openstack/ceph" Dec 05 11:21:18 crc kubenswrapper[4728]: I1205 11:21:18.267055 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceph" Dec 05 11:21:18 crc kubenswrapper[4728]: W1205 11:21:18.308907 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podec650e34_b972_46a5_886c_ba25b07fca9c.slice/crio-f07fa53ff6a41e51a92deaa272c83c5431fe682fb5198c86aeffc0ca910ba188 WatchSource:0}: Error finding container f07fa53ff6a41e51a92deaa272c83c5431fe682fb5198c86aeffc0ca910ba188: Status 404 returned error can't find the container with id f07fa53ff6a41e51a92deaa272c83c5431fe682fb5198c86aeffc0ca910ba188 Dec 05 11:21:19 crc kubenswrapper[4728]: I1205 11:21:19.133241 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph" event={"ID":"ec650e34-b972-46a5-886c-ba25b07fca9c","Type":"ContainerStarted","Data":"f07fa53ff6a41e51a92deaa272c83c5431fe682fb5198c86aeffc0ca910ba188"} Dec 05 11:21:20 crc kubenswrapper[4728]: I1205 11:21:20.609936 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-lv4vv" Dec 05 11:21:25 crc kubenswrapper[4728]: I1205 11:21:25.702996 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 11:21:25 crc kubenswrapper[4728]: I1205 11:21:25.703532 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 11:21:25 crc kubenswrapper[4728]: I1205 11:21:25.703597 4728 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" Dec 05 11:21:25 crc kubenswrapper[4728]: I1205 11:21:25.704665 4728 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"326e3eadf7eef59824ca257b48d5202ca29f557deff0b73b1ca60a1f7865a5b8"} pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" containerMessage="Container machine-config-daemon failed liveness probe, will be 
restarted" Dec 05 11:21:25 crc kubenswrapper[4728]: I1205 11:21:25.704773 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" containerID="cri-o://326e3eadf7eef59824ca257b48d5202ca29f557deff0b73b1ca60a1f7865a5b8" gracePeriod=600 Dec 05 11:21:26 crc kubenswrapper[4728]: I1205 11:21:26.182407 4728 generic.go:334] "Generic (PLEG): container finished" podID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerID="326e3eadf7eef59824ca257b48d5202ca29f557deff0b73b1ca60a1f7865a5b8" exitCode=0 Dec 05 11:21:26 crc kubenswrapper[4728]: I1205 11:21:26.182492 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" event={"ID":"95bfa60b-fcb6-4519-abc5-c25fea50921d","Type":"ContainerDied","Data":"326e3eadf7eef59824ca257b48d5202ca29f557deff0b73b1ca60a1f7865a5b8"} Dec 05 11:21:26 crc kubenswrapper[4728]: I1205 11:21:26.182551 4728 scope.go:117] "RemoveContainer" containerID="e2f21c6c45d9f27ccde3dfcb5a8280ecb9fd8456606573b9fb4860fd32895139" Dec 05 11:21:35 crc kubenswrapper[4728]: I1205 11:21:35.233868 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph" event={"ID":"ec650e34-b972-46a5-886c-ba25b07fca9c","Type":"ContainerStarted","Data":"e49584aae8406d753bfb94daacfee86463fbad22f79720f632a2a3bfef33a29e"} Dec 05 11:21:35 crc kubenswrapper[4728]: I1205 11:21:35.237500 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" event={"ID":"95bfa60b-fcb6-4519-abc5-c25fea50921d","Type":"ContainerStarted","Data":"6bf852478fb7f0ca84e27842bbcba629a54bb2c604960749055643fd058725b9"} Dec 05 11:21:35 crc kubenswrapper[4728]: I1205 11:21:35.256887 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceph" podStartSLOduration=1.764742326 podStartE2EDuration="18.256864829s" podCreationTimestamp="2025-12-05 11:21:17 +0000 UTC" firstStartedPulling="2025-12-05 11:21:18.31330072 +0000 UTC m=+812.455423433" lastFinishedPulling="2025-12-05 11:21:34.805423243 +0000 UTC m=+828.947545936" observedRunningTime="2025-12-05 11:21:35.251048713 +0000 UTC m=+829.393171456" watchObservedRunningTime="2025-12-05 11:21:35.256864829 +0000 UTC m=+829.398987582" Dec 05 11:21:46 crc kubenswrapper[4728]: I1205 11:21:46.513635 4728 scope.go:117] "RemoveContainer" containerID="441e36947749174097676ff62fe93aff6b9689d29fd12af2c1604adcceffa5d4" Dec 05 11:21:47 crc kubenswrapper[4728]: I1205 11:21:47.321004 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gf8np_f292da29-a632-47aa-8bcc-2d999eaa6c11/kube-multus/2.log" Dec 05 11:22:40 crc kubenswrapper[4728]: I1205 11:22:40.788723 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-vxsmj"] Dec 05 11:22:40 crc kubenswrapper[4728]: I1205 11:22:40.790335 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-vxsmj" Dec 05 11:22:40 crc kubenswrapper[4728]: I1205 11:22:40.793977 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66dfa503-526c-49ef-b857-0fb31f695171-catalog-content\") pod \"community-operators-vxsmj\" (UID: \"66dfa503-526c-49ef-b857-0fb31f695171\") " pod="openshift-marketplace/community-operators-vxsmj" Dec 05 11:22:40 crc kubenswrapper[4728]: I1205 11:22:40.794308 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mww47\" (UniqueName: \"kubernetes.io/projected/66dfa503-526c-49ef-b857-0fb31f695171-kube-api-access-mww47\") pod \"community-operators-vxsmj\" (UID: \"66dfa503-526c-49ef-b857-0fb31f695171\") " pod="openshift-marketplace/community-operators-vxsmj" Dec 05 11:22:40 crc kubenswrapper[4728]: I1205 11:22:40.794536 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66dfa503-526c-49ef-b857-0fb31f695171-utilities\") pod \"community-operators-vxsmj\" (UID: \"66dfa503-526c-49ef-b857-0fb31f695171\") " pod="openshift-marketplace/community-operators-vxsmj" Dec 05 11:22:40 crc kubenswrapper[4728]: I1205 11:22:40.808485 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vxsmj"] Dec 05 11:22:40 crc kubenswrapper[4728]: I1205 11:22:40.895616 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mww47\" (UniqueName: \"kubernetes.io/projected/66dfa503-526c-49ef-b857-0fb31f695171-kube-api-access-mww47\") pod \"community-operators-vxsmj\" (UID: \"66dfa503-526c-49ef-b857-0fb31f695171\") " pod="openshift-marketplace/community-operators-vxsmj" Dec 05 11:22:40 crc kubenswrapper[4728]: I1205 11:22:40.896087 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66dfa503-526c-49ef-b857-0fb31f695171-utilities\") pod \"community-operators-vxsmj\" (UID: \"66dfa503-526c-49ef-b857-0fb31f695171\") " pod="openshift-marketplace/community-operators-vxsmj" Dec 05 11:22:40 crc kubenswrapper[4728]: I1205 11:22:40.896511 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66dfa503-526c-49ef-b857-0fb31f695171-utilities\") pod \"community-operators-vxsmj\" (UID: \"66dfa503-526c-49ef-b857-0fb31f695171\") " pod="openshift-marketplace/community-operators-vxsmj" Dec 05 11:22:40 crc kubenswrapper[4728]: I1205 11:22:40.896593 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66dfa503-526c-49ef-b857-0fb31f695171-catalog-content\") pod \"community-operators-vxsmj\" (UID: \"66dfa503-526c-49ef-b857-0fb31f695171\") " pod="openshift-marketplace/community-operators-vxsmj" Dec 05 11:22:40 crc kubenswrapper[4728]: I1205 11:22:40.896841 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66dfa503-526c-49ef-b857-0fb31f695171-catalog-content\") pod \"community-operators-vxsmj\" (UID: \"66dfa503-526c-49ef-b857-0fb31f695171\") " pod="openshift-marketplace/community-operators-vxsmj" Dec 05 11:22:40 crc kubenswrapper[4728]: I1205 11:22:40.928499 4728 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-mww47\" (UniqueName: \"kubernetes.io/projected/66dfa503-526c-49ef-b857-0fb31f695171-kube-api-access-mww47\") pod \"community-operators-vxsmj\" (UID: \"66dfa503-526c-49ef-b857-0fb31f695171\") " pod="openshift-marketplace/community-operators-vxsmj" Dec 05 11:22:41 crc kubenswrapper[4728]: I1205 11:22:41.110679 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vxsmj" Dec 05 11:22:41 crc kubenswrapper[4728]: I1205 11:22:41.396734 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vxsmj"] Dec 05 11:22:41 crc kubenswrapper[4728]: I1205 11:22:41.649238 4728 generic.go:334] "Generic (PLEG): container finished" podID="66dfa503-526c-49ef-b857-0fb31f695171" containerID="19dbe19f94a2543800303edb0e075991533676f5498ee824219bed36f5a57e5c" exitCode=0 Dec 05 11:22:41 crc kubenswrapper[4728]: I1205 11:22:41.649314 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vxsmj" event={"ID":"66dfa503-526c-49ef-b857-0fb31f695171","Type":"ContainerDied","Data":"19dbe19f94a2543800303edb0e075991533676f5498ee824219bed36f5a57e5c"} Dec 05 11:22:41 crc kubenswrapper[4728]: I1205 11:22:41.649353 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vxsmj" event={"ID":"66dfa503-526c-49ef-b857-0fb31f695171","Type":"ContainerStarted","Data":"8fbafbb6f89a8bf654409dff43ca585c302d0e50a7ef989c4aeb2027e3c149a6"} Dec 05 11:22:42 crc kubenswrapper[4728]: I1205 11:22:42.660502 4728 generic.go:334] "Generic (PLEG): container finished" podID="66dfa503-526c-49ef-b857-0fb31f695171" containerID="a467537e23ed898911406af39f9e6d12d328e0649ce365b6dd5dda54f5c0cc57" exitCode=0 Dec 05 11:22:42 crc kubenswrapper[4728]: I1205 11:22:42.660597 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vxsmj" event={"ID":"66dfa503-526c-49ef-b857-0fb31f695171","Type":"ContainerDied","Data":"a467537e23ed898911406af39f9e6d12d328e0649ce365b6dd5dda54f5c0cc57"} Dec 05 11:22:43 crc kubenswrapper[4728]: I1205 11:22:43.671224 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vxsmj" event={"ID":"66dfa503-526c-49ef-b857-0fb31f695171","Type":"ContainerStarted","Data":"b127d585261a6b4814c8939255fe27c695a1b4c046ef445b35a7337c662fa226"} Dec 05 11:22:43 crc kubenswrapper[4728]: I1205 11:22:43.701270 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-vxsmj" podStartSLOduration=2.141745947 podStartE2EDuration="3.701245169s" podCreationTimestamp="2025-12-05 11:22:40 +0000 UTC" firstStartedPulling="2025-12-05 11:22:41.651145715 +0000 UTC m=+895.793268448" lastFinishedPulling="2025-12-05 11:22:43.210644937 +0000 UTC m=+897.352767670" observedRunningTime="2025-12-05 11:22:43.699548172 +0000 UTC m=+897.841670925" watchObservedRunningTime="2025-12-05 11:22:43.701245169 +0000 UTC m=+897.843367892" Dec 05 11:22:51 crc kubenswrapper[4728]: I1205 11:22:51.110766 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-vxsmj" Dec 05 11:22:51 crc kubenswrapper[4728]: I1205 11:22:51.111330 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-vxsmj" Dec 05 11:22:51 crc kubenswrapper[4728]: I1205 11:22:51.162402 4728 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-vxsmj" Dec 05 11:22:51 crc kubenswrapper[4728]: I1205 11:22:51.769591 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-vxsmj" Dec 05 11:22:51 crc kubenswrapper[4728]: I1205 11:22:51.814514 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-vxsmj"] Dec 05 11:22:53 crc kubenswrapper[4728]: I1205 11:22:53.747348 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-vxsmj" podUID="66dfa503-526c-49ef-b857-0fb31f695171" containerName="registry-server" containerID="cri-o://b127d585261a6b4814c8939255fe27c695a1b4c046ef445b35a7337c662fa226" gracePeriod=2 Dec 05 11:22:54 crc kubenswrapper[4728]: I1205 11:22:54.130161 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vxsmj" Dec 05 11:22:54 crc kubenswrapper[4728]: I1205 11:22:54.279073 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mww47\" (UniqueName: \"kubernetes.io/projected/66dfa503-526c-49ef-b857-0fb31f695171-kube-api-access-mww47\") pod \"66dfa503-526c-49ef-b857-0fb31f695171\" (UID: \"66dfa503-526c-49ef-b857-0fb31f695171\") " Dec 05 11:22:54 crc kubenswrapper[4728]: I1205 11:22:54.279262 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66dfa503-526c-49ef-b857-0fb31f695171-catalog-content\") pod \"66dfa503-526c-49ef-b857-0fb31f695171\" (UID: \"66dfa503-526c-49ef-b857-0fb31f695171\") " Dec 05 11:22:54 crc kubenswrapper[4728]: I1205 11:22:54.279342 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66dfa503-526c-49ef-b857-0fb31f695171-utilities\") pod \"66dfa503-526c-49ef-b857-0fb31f695171\" (UID: \"66dfa503-526c-49ef-b857-0fb31f695171\") " Dec 05 11:22:54 crc kubenswrapper[4728]: I1205 11:22:54.280726 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/66dfa503-526c-49ef-b857-0fb31f695171-utilities" (OuterVolumeSpecName: "utilities") pod "66dfa503-526c-49ef-b857-0fb31f695171" (UID: "66dfa503-526c-49ef-b857-0fb31f695171"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:22:54 crc kubenswrapper[4728]: I1205 11:22:54.288478 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66dfa503-526c-49ef-b857-0fb31f695171-kube-api-access-mww47" (OuterVolumeSpecName: "kube-api-access-mww47") pod "66dfa503-526c-49ef-b857-0fb31f695171" (UID: "66dfa503-526c-49ef-b857-0fb31f695171"). InnerVolumeSpecName "kube-api-access-mww47". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:22:54 crc kubenswrapper[4728]: I1205 11:22:54.346996 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/66dfa503-526c-49ef-b857-0fb31f695171-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "66dfa503-526c-49ef-b857-0fb31f695171" (UID: "66dfa503-526c-49ef-b857-0fb31f695171"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:22:54 crc kubenswrapper[4728]: I1205 11:22:54.381368 4728 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66dfa503-526c-49ef-b857-0fb31f695171-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 11:22:54 crc kubenswrapper[4728]: I1205 11:22:54.381414 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mww47\" (UniqueName: \"kubernetes.io/projected/66dfa503-526c-49ef-b857-0fb31f695171-kube-api-access-mww47\") on node \"crc\" DevicePath \"\"" Dec 05 11:22:54 crc kubenswrapper[4728]: I1205 11:22:54.381428 4728 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66dfa503-526c-49ef-b857-0fb31f695171-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 11:22:54 crc kubenswrapper[4728]: I1205 11:22:54.757466 4728 generic.go:334] "Generic (PLEG): container finished" podID="66dfa503-526c-49ef-b857-0fb31f695171" containerID="b127d585261a6b4814c8939255fe27c695a1b4c046ef445b35a7337c662fa226" exitCode=0 Dec 05 11:22:54 crc kubenswrapper[4728]: I1205 11:22:54.757730 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vxsmj" event={"ID":"66dfa503-526c-49ef-b857-0fb31f695171","Type":"ContainerDied","Data":"b127d585261a6b4814c8939255fe27c695a1b4c046ef445b35a7337c662fa226"} Dec 05 11:22:54 crc kubenswrapper[4728]: I1205 11:22:54.757771 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vxsmj" event={"ID":"66dfa503-526c-49ef-b857-0fb31f695171","Type":"ContainerDied","Data":"8fbafbb6f89a8bf654409dff43ca585c302d0e50a7ef989c4aeb2027e3c149a6"} Dec 05 11:22:54 crc kubenswrapper[4728]: I1205 11:22:54.757846 4728 scope.go:117] "RemoveContainer" containerID="b127d585261a6b4814c8939255fe27c695a1b4c046ef445b35a7337c662fa226" Dec 05 11:22:54 crc kubenswrapper[4728]: I1205 11:22:54.758082 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-vxsmj" Dec 05 11:22:55 crc kubenswrapper[4728]: I1205 11:22:55.435103 4728 scope.go:117] "RemoveContainer" containerID="a467537e23ed898911406af39f9e6d12d328e0649ce365b6dd5dda54f5c0cc57" Dec 05 11:22:55 crc kubenswrapper[4728]: I1205 11:22:55.452435 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-vxsmj"] Dec 05 11:22:55 crc kubenswrapper[4728]: I1205 11:22:55.455739 4728 scope.go:117] "RemoveContainer" containerID="19dbe19f94a2543800303edb0e075991533676f5498ee824219bed36f5a57e5c" Dec 05 11:22:55 crc kubenswrapper[4728]: I1205 11:22:55.467168 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-vxsmj"] Dec 05 11:22:55 crc kubenswrapper[4728]: I1205 11:22:55.480082 4728 scope.go:117] "RemoveContainer" containerID="b127d585261a6b4814c8939255fe27c695a1b4c046ef445b35a7337c662fa226" Dec 05 11:22:55 crc kubenswrapper[4728]: E1205 11:22:55.480547 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b127d585261a6b4814c8939255fe27c695a1b4c046ef445b35a7337c662fa226\": container with ID starting with b127d585261a6b4814c8939255fe27c695a1b4c046ef445b35a7337c662fa226 not found: ID does not exist" containerID="b127d585261a6b4814c8939255fe27c695a1b4c046ef445b35a7337c662fa226" Dec 05 11:22:55 crc kubenswrapper[4728]: I1205 11:22:55.480580 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b127d585261a6b4814c8939255fe27c695a1b4c046ef445b35a7337c662fa226"} err="failed to get container status \"b127d585261a6b4814c8939255fe27c695a1b4c046ef445b35a7337c662fa226\": rpc error: code = NotFound desc = could not find container \"b127d585261a6b4814c8939255fe27c695a1b4c046ef445b35a7337c662fa226\": container with ID starting with b127d585261a6b4814c8939255fe27c695a1b4c046ef445b35a7337c662fa226 not found: ID does not exist" Dec 05 11:22:55 crc kubenswrapper[4728]: I1205 11:22:55.480603 4728 scope.go:117] "RemoveContainer" containerID="a467537e23ed898911406af39f9e6d12d328e0649ce365b6dd5dda54f5c0cc57" Dec 05 11:22:55 crc kubenswrapper[4728]: E1205 11:22:55.480908 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a467537e23ed898911406af39f9e6d12d328e0649ce365b6dd5dda54f5c0cc57\": container with ID starting with a467537e23ed898911406af39f9e6d12d328e0649ce365b6dd5dda54f5c0cc57 not found: ID does not exist" containerID="a467537e23ed898911406af39f9e6d12d328e0649ce365b6dd5dda54f5c0cc57" Dec 05 11:22:55 crc kubenswrapper[4728]: I1205 11:22:55.480949 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a467537e23ed898911406af39f9e6d12d328e0649ce365b6dd5dda54f5c0cc57"} err="failed to get container status \"a467537e23ed898911406af39f9e6d12d328e0649ce365b6dd5dda54f5c0cc57\": rpc error: code = NotFound desc = could not find container \"a467537e23ed898911406af39f9e6d12d328e0649ce365b6dd5dda54f5c0cc57\": container with ID starting with a467537e23ed898911406af39f9e6d12d328e0649ce365b6dd5dda54f5c0cc57 not found: ID does not exist" Dec 05 11:22:55 crc kubenswrapper[4728]: I1205 11:22:55.480969 4728 scope.go:117] "RemoveContainer" containerID="19dbe19f94a2543800303edb0e075991533676f5498ee824219bed36f5a57e5c" Dec 05 11:22:55 crc kubenswrapper[4728]: E1205 11:22:55.481251 4728 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"19dbe19f94a2543800303edb0e075991533676f5498ee824219bed36f5a57e5c\": container with ID starting with 19dbe19f94a2543800303edb0e075991533676f5498ee824219bed36f5a57e5c not found: ID does not exist" containerID="19dbe19f94a2543800303edb0e075991533676f5498ee824219bed36f5a57e5c" Dec 05 11:22:55 crc kubenswrapper[4728]: I1205 11:22:55.481276 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19dbe19f94a2543800303edb0e075991533676f5498ee824219bed36f5a57e5c"} err="failed to get container status \"19dbe19f94a2543800303edb0e075991533676f5498ee824219bed36f5a57e5c\": rpc error: code = NotFound desc = could not find container \"19dbe19f94a2543800303edb0e075991533676f5498ee824219bed36f5a57e5c\": container with ID starting with 19dbe19f94a2543800303edb0e075991533676f5498ee824219bed36f5a57e5c not found: ID does not exist" Dec 05 11:22:56 crc kubenswrapper[4728]: I1205 11:22:56.373459 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="66dfa503-526c-49ef-b857-0fb31f695171" path="/var/lib/kubelet/pods/66dfa503-526c-49ef-b857-0fb31f695171/volumes" Dec 05 11:22:59 crc kubenswrapper[4728]: I1205 11:22:59.859353 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7x7pw"] Dec 05 11:22:59 crc kubenswrapper[4728]: E1205 11:22:59.859901 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66dfa503-526c-49ef-b857-0fb31f695171" containerName="extract-utilities" Dec 05 11:22:59 crc kubenswrapper[4728]: I1205 11:22:59.859918 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="66dfa503-526c-49ef-b857-0fb31f695171" containerName="extract-utilities" Dec 05 11:22:59 crc kubenswrapper[4728]: E1205 11:22:59.859939 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66dfa503-526c-49ef-b857-0fb31f695171" containerName="extract-content" Dec 05 11:22:59 crc kubenswrapper[4728]: I1205 11:22:59.859946 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="66dfa503-526c-49ef-b857-0fb31f695171" containerName="extract-content" Dec 05 11:22:59 crc kubenswrapper[4728]: E1205 11:22:59.859959 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66dfa503-526c-49ef-b857-0fb31f695171" containerName="registry-server" Dec 05 11:22:59 crc kubenswrapper[4728]: I1205 11:22:59.859966 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="66dfa503-526c-49ef-b857-0fb31f695171" containerName="registry-server" Dec 05 11:22:59 crc kubenswrapper[4728]: I1205 11:22:59.860107 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="66dfa503-526c-49ef-b857-0fb31f695171" containerName="registry-server" Dec 05 11:22:59 crc kubenswrapper[4728]: I1205 11:22:59.860980 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7x7pw" Dec 05 11:22:59 crc kubenswrapper[4728]: I1205 11:22:59.863156 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 05 11:22:59 crc kubenswrapper[4728]: I1205 11:22:59.876677 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7x7pw"] Dec 05 11:22:59 crc kubenswrapper[4728]: I1205 11:22:59.987223 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5ce9b03b-d5f8-4249-80c3-8e637c09bf8e-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7x7pw\" (UID: \"5ce9b03b-d5f8-4249-80c3-8e637c09bf8e\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7x7pw" Dec 05 11:22:59 crc kubenswrapper[4728]: I1205 11:22:59.987298 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5ce9b03b-d5f8-4249-80c3-8e637c09bf8e-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7x7pw\" (UID: \"5ce9b03b-d5f8-4249-80c3-8e637c09bf8e\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7x7pw" Dec 05 11:22:59 crc kubenswrapper[4728]: I1205 11:22:59.987378 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kx9xp\" (UniqueName: \"kubernetes.io/projected/5ce9b03b-d5f8-4249-80c3-8e637c09bf8e-kube-api-access-kx9xp\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7x7pw\" (UID: \"5ce9b03b-d5f8-4249-80c3-8e637c09bf8e\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7x7pw" Dec 05 11:23:00 crc kubenswrapper[4728]: I1205 11:23:00.088354 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5ce9b03b-d5f8-4249-80c3-8e637c09bf8e-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7x7pw\" (UID: \"5ce9b03b-d5f8-4249-80c3-8e637c09bf8e\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7x7pw" Dec 05 11:23:00 crc kubenswrapper[4728]: I1205 11:23:00.088437 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kx9xp\" (UniqueName: \"kubernetes.io/projected/5ce9b03b-d5f8-4249-80c3-8e637c09bf8e-kube-api-access-kx9xp\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7x7pw\" (UID: \"5ce9b03b-d5f8-4249-80c3-8e637c09bf8e\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7x7pw" Dec 05 11:23:00 crc kubenswrapper[4728]: I1205 11:23:00.088579 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5ce9b03b-d5f8-4249-80c3-8e637c09bf8e-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7x7pw\" (UID: \"5ce9b03b-d5f8-4249-80c3-8e637c09bf8e\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7x7pw" Dec 05 11:23:00 crc kubenswrapper[4728]: I1205 11:23:00.089270 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/5ce9b03b-d5f8-4249-80c3-8e637c09bf8e-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7x7pw\" (UID: \"5ce9b03b-d5f8-4249-80c3-8e637c09bf8e\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7x7pw" Dec 05 11:23:00 crc kubenswrapper[4728]: I1205 11:23:00.089480 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5ce9b03b-d5f8-4249-80c3-8e637c09bf8e-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7x7pw\" (UID: \"5ce9b03b-d5f8-4249-80c3-8e637c09bf8e\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7x7pw" Dec 05 11:23:00 crc kubenswrapper[4728]: I1205 11:23:00.113733 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kx9xp\" (UniqueName: \"kubernetes.io/projected/5ce9b03b-d5f8-4249-80c3-8e637c09bf8e-kube-api-access-kx9xp\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7x7pw\" (UID: \"5ce9b03b-d5f8-4249-80c3-8e637c09bf8e\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7x7pw" Dec 05 11:23:00 crc kubenswrapper[4728]: I1205 11:23:00.177568 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7x7pw" Dec 05 11:23:00 crc kubenswrapper[4728]: I1205 11:23:00.416675 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7x7pw"] Dec 05 11:23:00 crc kubenswrapper[4728]: I1205 11:23:00.792374 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7x7pw" event={"ID":"5ce9b03b-d5f8-4249-80c3-8e637c09bf8e","Type":"ContainerStarted","Data":"fd4da7646937166f017964f894f8f1b56e0a367d622886d0d5bbafb4ffe61861"} Dec 05 11:23:00 crc kubenswrapper[4728]: I1205 11:23:00.792675 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7x7pw" event={"ID":"5ce9b03b-d5f8-4249-80c3-8e637c09bf8e","Type":"ContainerStarted","Data":"1affbaf682523410206317c500f1db247980dbd820e54b42258dbb2dbc738fab"} Dec 05 11:23:01 crc kubenswrapper[4728]: I1205 11:23:01.800433 4728 generic.go:334] "Generic (PLEG): container finished" podID="5ce9b03b-d5f8-4249-80c3-8e637c09bf8e" containerID="fd4da7646937166f017964f894f8f1b56e0a367d622886d0d5bbafb4ffe61861" exitCode=0 Dec 05 11:23:01 crc kubenswrapper[4728]: I1205 11:23:01.800492 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7x7pw" event={"ID":"5ce9b03b-d5f8-4249-80c3-8e637c09bf8e","Type":"ContainerDied","Data":"fd4da7646937166f017964f894f8f1b56e0a367d622886d0d5bbafb4ffe61861"} Dec 05 11:23:02 crc kubenswrapper[4728]: I1205 11:23:02.001054 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-b75mm"] Dec 05 11:23:02 crc kubenswrapper[4728]: I1205 11:23:02.003473 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-b75mm" Dec 05 11:23:02 crc kubenswrapper[4728]: I1205 11:23:02.008671 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-b75mm"] Dec 05 11:23:02 crc kubenswrapper[4728]: I1205 11:23:02.011360 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wqdcw\" (UniqueName: \"kubernetes.io/projected/595cb777-40d8-45d7-a9f6-572de2198cb7-kube-api-access-wqdcw\") pod \"redhat-operators-b75mm\" (UID: \"595cb777-40d8-45d7-a9f6-572de2198cb7\") " pod="openshift-marketplace/redhat-operators-b75mm" Dec 05 11:23:02 crc kubenswrapper[4728]: I1205 11:23:02.011529 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/595cb777-40d8-45d7-a9f6-572de2198cb7-utilities\") pod \"redhat-operators-b75mm\" (UID: \"595cb777-40d8-45d7-a9f6-572de2198cb7\") " pod="openshift-marketplace/redhat-operators-b75mm" Dec 05 11:23:02 crc kubenswrapper[4728]: I1205 11:23:02.011597 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/595cb777-40d8-45d7-a9f6-572de2198cb7-catalog-content\") pod \"redhat-operators-b75mm\" (UID: \"595cb777-40d8-45d7-a9f6-572de2198cb7\") " pod="openshift-marketplace/redhat-operators-b75mm" Dec 05 11:23:02 crc kubenswrapper[4728]: I1205 11:23:02.112855 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/595cb777-40d8-45d7-a9f6-572de2198cb7-utilities\") pod \"redhat-operators-b75mm\" (UID: \"595cb777-40d8-45d7-a9f6-572de2198cb7\") " pod="openshift-marketplace/redhat-operators-b75mm" Dec 05 11:23:02 crc kubenswrapper[4728]: I1205 11:23:02.112914 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/595cb777-40d8-45d7-a9f6-572de2198cb7-catalog-content\") pod \"redhat-operators-b75mm\" (UID: \"595cb777-40d8-45d7-a9f6-572de2198cb7\") " pod="openshift-marketplace/redhat-operators-b75mm" Dec 05 11:23:02 crc kubenswrapper[4728]: I1205 11:23:02.112964 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wqdcw\" (UniqueName: \"kubernetes.io/projected/595cb777-40d8-45d7-a9f6-572de2198cb7-kube-api-access-wqdcw\") pod \"redhat-operators-b75mm\" (UID: \"595cb777-40d8-45d7-a9f6-572de2198cb7\") " pod="openshift-marketplace/redhat-operators-b75mm" Dec 05 11:23:02 crc kubenswrapper[4728]: I1205 11:23:02.113442 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/595cb777-40d8-45d7-a9f6-572de2198cb7-utilities\") pod \"redhat-operators-b75mm\" (UID: \"595cb777-40d8-45d7-a9f6-572de2198cb7\") " pod="openshift-marketplace/redhat-operators-b75mm" Dec 05 11:23:02 crc kubenswrapper[4728]: I1205 11:23:02.113600 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/595cb777-40d8-45d7-a9f6-572de2198cb7-catalog-content\") pod \"redhat-operators-b75mm\" (UID: \"595cb777-40d8-45d7-a9f6-572de2198cb7\") " pod="openshift-marketplace/redhat-operators-b75mm" Dec 05 11:23:02 crc kubenswrapper[4728]: I1205 11:23:02.135529 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-wqdcw\" (UniqueName: \"kubernetes.io/projected/595cb777-40d8-45d7-a9f6-572de2198cb7-kube-api-access-wqdcw\") pod \"redhat-operators-b75mm\" (UID: \"595cb777-40d8-45d7-a9f6-572de2198cb7\") " pod="openshift-marketplace/redhat-operators-b75mm" Dec 05 11:23:02 crc kubenswrapper[4728]: I1205 11:23:02.334734 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-b75mm" Dec 05 11:23:02 crc kubenswrapper[4728]: I1205 11:23:02.730320 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-b75mm"] Dec 05 11:23:02 crc kubenswrapper[4728]: W1205 11:23:02.737321 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod595cb777_40d8_45d7_a9f6_572de2198cb7.slice/crio-72591aa1beb3fd56ef4268bef72614097fdde6ef1c2bf6ab628f1bcf2b938820 WatchSource:0}: Error finding container 72591aa1beb3fd56ef4268bef72614097fdde6ef1c2bf6ab628f1bcf2b938820: Status 404 returned error can't find the container with id 72591aa1beb3fd56ef4268bef72614097fdde6ef1c2bf6ab628f1bcf2b938820 Dec 05 11:23:02 crc kubenswrapper[4728]: I1205 11:23:02.809089 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7x7pw" event={"ID":"5ce9b03b-d5f8-4249-80c3-8e637c09bf8e","Type":"ContainerStarted","Data":"4af05bf56e848ac19c7349a84ddc3a9cea5722076b7abc1bfa794a11c0eecab2"} Dec 05 11:23:02 crc kubenswrapper[4728]: I1205 11:23:02.811803 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b75mm" event={"ID":"595cb777-40d8-45d7-a9f6-572de2198cb7","Type":"ContainerStarted","Data":"72591aa1beb3fd56ef4268bef72614097fdde6ef1c2bf6ab628f1bcf2b938820"} Dec 05 11:23:03 crc kubenswrapper[4728]: I1205 11:23:03.820299 4728 generic.go:334] "Generic (PLEG): container finished" podID="5ce9b03b-d5f8-4249-80c3-8e637c09bf8e" containerID="4af05bf56e848ac19c7349a84ddc3a9cea5722076b7abc1bfa794a11c0eecab2" exitCode=0 Dec 05 11:23:03 crc kubenswrapper[4728]: I1205 11:23:03.820351 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7x7pw" event={"ID":"5ce9b03b-d5f8-4249-80c3-8e637c09bf8e","Type":"ContainerDied","Data":"4af05bf56e848ac19c7349a84ddc3a9cea5722076b7abc1bfa794a11c0eecab2"} Dec 05 11:23:03 crc kubenswrapper[4728]: I1205 11:23:03.824165 4728 generic.go:334] "Generic (PLEG): container finished" podID="595cb777-40d8-45d7-a9f6-572de2198cb7" containerID="35caa6d2d10482090b001c57e7a035690464b0d9e507d68f51030f6fc6555fb0" exitCode=0 Dec 05 11:23:03 crc kubenswrapper[4728]: I1205 11:23:03.824218 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b75mm" event={"ID":"595cb777-40d8-45d7-a9f6-572de2198cb7","Type":"ContainerDied","Data":"35caa6d2d10482090b001c57e7a035690464b0d9e507d68f51030f6fc6555fb0"} Dec 05 11:23:04 crc kubenswrapper[4728]: I1205 11:23:04.833951 4728 generic.go:334] "Generic (PLEG): container finished" podID="5ce9b03b-d5f8-4249-80c3-8e637c09bf8e" containerID="2ef542d2b37f22e6caa6400a6a83ce37c2392d3c2e5692ceae1326c7d114dafc" exitCode=0 Dec 05 11:23:04 crc kubenswrapper[4728]: I1205 11:23:04.834019 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7x7pw" 
event={"ID":"5ce9b03b-d5f8-4249-80c3-8e637c09bf8e","Type":"ContainerDied","Data":"2ef542d2b37f22e6caa6400a6a83ce37c2392d3c2e5692ceae1326c7d114dafc"} Dec 05 11:23:04 crc kubenswrapper[4728]: I1205 11:23:04.839309 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b75mm" event={"ID":"595cb777-40d8-45d7-a9f6-572de2198cb7","Type":"ContainerStarted","Data":"551ccbb4b7c8cb1d513b015023317394be3fb2d95bab0ef191f5d7d7134ec502"} Dec 05 11:23:05 crc kubenswrapper[4728]: I1205 11:23:05.846290 4728 generic.go:334] "Generic (PLEG): container finished" podID="595cb777-40d8-45d7-a9f6-572de2198cb7" containerID="551ccbb4b7c8cb1d513b015023317394be3fb2d95bab0ef191f5d7d7134ec502" exitCode=0 Dec 05 11:23:05 crc kubenswrapper[4728]: I1205 11:23:05.846345 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b75mm" event={"ID":"595cb777-40d8-45d7-a9f6-572de2198cb7","Type":"ContainerDied","Data":"551ccbb4b7c8cb1d513b015023317394be3fb2d95bab0ef191f5d7d7134ec502"} Dec 05 11:23:06 crc kubenswrapper[4728]: I1205 11:23:06.098694 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7x7pw" Dec 05 11:23:06 crc kubenswrapper[4728]: I1205 11:23:06.264312 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kx9xp\" (UniqueName: \"kubernetes.io/projected/5ce9b03b-d5f8-4249-80c3-8e637c09bf8e-kube-api-access-kx9xp\") pod \"5ce9b03b-d5f8-4249-80c3-8e637c09bf8e\" (UID: \"5ce9b03b-d5f8-4249-80c3-8e637c09bf8e\") " Dec 05 11:23:06 crc kubenswrapper[4728]: I1205 11:23:06.264415 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5ce9b03b-d5f8-4249-80c3-8e637c09bf8e-util\") pod \"5ce9b03b-d5f8-4249-80c3-8e637c09bf8e\" (UID: \"5ce9b03b-d5f8-4249-80c3-8e637c09bf8e\") " Dec 05 11:23:06 crc kubenswrapper[4728]: I1205 11:23:06.265039 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5ce9b03b-d5f8-4249-80c3-8e637c09bf8e-bundle\") pod \"5ce9b03b-d5f8-4249-80c3-8e637c09bf8e\" (UID: \"5ce9b03b-d5f8-4249-80c3-8e637c09bf8e\") " Dec 05 11:23:06 crc kubenswrapper[4728]: I1205 11:23:06.265422 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5ce9b03b-d5f8-4249-80c3-8e637c09bf8e-bundle" (OuterVolumeSpecName: "bundle") pod "5ce9b03b-d5f8-4249-80c3-8e637c09bf8e" (UID: "5ce9b03b-d5f8-4249-80c3-8e637c09bf8e"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:23:06 crc kubenswrapper[4728]: I1205 11:23:06.269036 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ce9b03b-d5f8-4249-80c3-8e637c09bf8e-kube-api-access-kx9xp" (OuterVolumeSpecName: "kube-api-access-kx9xp") pod "5ce9b03b-d5f8-4249-80c3-8e637c09bf8e" (UID: "5ce9b03b-d5f8-4249-80c3-8e637c09bf8e"). InnerVolumeSpecName "kube-api-access-kx9xp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:23:06 crc kubenswrapper[4728]: I1205 11:23:06.278960 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5ce9b03b-d5f8-4249-80c3-8e637c09bf8e-util" (OuterVolumeSpecName: "util") pod "5ce9b03b-d5f8-4249-80c3-8e637c09bf8e" (UID: "5ce9b03b-d5f8-4249-80c3-8e637c09bf8e"). 
InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:23:06 crc kubenswrapper[4728]: I1205 11:23:06.366409 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kx9xp\" (UniqueName: \"kubernetes.io/projected/5ce9b03b-d5f8-4249-80c3-8e637c09bf8e-kube-api-access-kx9xp\") on node \"crc\" DevicePath \"\"" Dec 05 11:23:06 crc kubenswrapper[4728]: I1205 11:23:06.366461 4728 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5ce9b03b-d5f8-4249-80c3-8e637c09bf8e-util\") on node \"crc\" DevicePath \"\"" Dec 05 11:23:06 crc kubenswrapper[4728]: I1205 11:23:06.366482 4728 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5ce9b03b-d5f8-4249-80c3-8e637c09bf8e-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:23:06 crc kubenswrapper[4728]: I1205 11:23:06.856400 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b75mm" event={"ID":"595cb777-40d8-45d7-a9f6-572de2198cb7","Type":"ContainerStarted","Data":"7d28dd0d795e4b9f79539882d1653968c89c0d07f65b091b47e467e0aee0c718"} Dec 05 11:23:06 crc kubenswrapper[4728]: I1205 11:23:06.858262 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7x7pw" event={"ID":"5ce9b03b-d5f8-4249-80c3-8e637c09bf8e","Type":"ContainerDied","Data":"1affbaf682523410206317c500f1db247980dbd820e54b42258dbb2dbc738fab"} Dec 05 11:23:06 crc kubenswrapper[4728]: I1205 11:23:06.858299 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1affbaf682523410206317c500f1db247980dbd820e54b42258dbb2dbc738fab" Dec 05 11:23:06 crc kubenswrapper[4728]: I1205 11:23:06.858334 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7x7pw" Dec 05 11:23:06 crc kubenswrapper[4728]: I1205 11:23:06.882206 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-b75mm" podStartSLOduration=3.206541718 podStartE2EDuration="5.88218309s" podCreationTimestamp="2025-12-05 11:23:01 +0000 UTC" firstStartedPulling="2025-12-05 11:23:03.82632274 +0000 UTC m=+917.968445473" lastFinishedPulling="2025-12-05 11:23:06.501964152 +0000 UTC m=+920.644086845" observedRunningTime="2025-12-05 11:23:06.874267533 +0000 UTC m=+921.016390246" watchObservedRunningTime="2025-12-05 11:23:06.88218309 +0000 UTC m=+921.024305793" Dec 05 11:23:11 crc kubenswrapper[4728]: I1205 11:23:11.139631 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-5rg82"] Dec 05 11:23:11 crc kubenswrapper[4728]: E1205 11:23:11.140314 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ce9b03b-d5f8-4249-80c3-8e637c09bf8e" containerName="extract" Dec 05 11:23:11 crc kubenswrapper[4728]: I1205 11:23:11.140326 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ce9b03b-d5f8-4249-80c3-8e637c09bf8e" containerName="extract" Dec 05 11:23:11 crc kubenswrapper[4728]: E1205 11:23:11.140348 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ce9b03b-d5f8-4249-80c3-8e637c09bf8e" containerName="util" Dec 05 11:23:11 crc kubenswrapper[4728]: I1205 11:23:11.140354 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ce9b03b-d5f8-4249-80c3-8e637c09bf8e" containerName="util" Dec 05 11:23:11 crc kubenswrapper[4728]: E1205 11:23:11.140364 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ce9b03b-d5f8-4249-80c3-8e637c09bf8e" containerName="pull" Dec 05 11:23:11 crc kubenswrapper[4728]: I1205 11:23:11.140370 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ce9b03b-d5f8-4249-80c3-8e637c09bf8e" containerName="pull" Dec 05 11:23:11 crc kubenswrapper[4728]: I1205 11:23:11.140458 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ce9b03b-d5f8-4249-80c3-8e637c09bf8e" containerName="extract" Dec 05 11:23:11 crc kubenswrapper[4728]: I1205 11:23:11.140838 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-5rg82" Dec 05 11:23:11 crc kubenswrapper[4728]: I1205 11:23:11.143614 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Dec 05 11:23:11 crc kubenswrapper[4728]: I1205 11:23:11.143630 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-njbnj" Dec 05 11:23:11 crc kubenswrapper[4728]: I1205 11:23:11.143735 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Dec 05 11:23:11 crc kubenswrapper[4728]: I1205 11:23:11.223746 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-5rg82"] Dec 05 11:23:11 crc kubenswrapper[4728]: I1205 11:23:11.328713 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vf6px\" (UniqueName: \"kubernetes.io/projected/2ce6b79a-c293-472b-90f8-7b56ce77b4cf-kube-api-access-vf6px\") pod \"nmstate-operator-5b5b58f5c8-5rg82\" (UID: \"2ce6b79a-c293-472b-90f8-7b56ce77b4cf\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-5rg82" Dec 05 11:23:11 crc kubenswrapper[4728]: I1205 11:23:11.430167 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vf6px\" (UniqueName: \"kubernetes.io/projected/2ce6b79a-c293-472b-90f8-7b56ce77b4cf-kube-api-access-vf6px\") pod \"nmstate-operator-5b5b58f5c8-5rg82\" (UID: \"2ce6b79a-c293-472b-90f8-7b56ce77b4cf\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-5rg82" Dec 05 11:23:11 crc kubenswrapper[4728]: I1205 11:23:11.450205 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vf6px\" (UniqueName: \"kubernetes.io/projected/2ce6b79a-c293-472b-90f8-7b56ce77b4cf-kube-api-access-vf6px\") pod \"nmstate-operator-5b5b58f5c8-5rg82\" (UID: \"2ce6b79a-c293-472b-90f8-7b56ce77b4cf\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-5rg82" Dec 05 11:23:11 crc kubenswrapper[4728]: I1205 11:23:11.455644 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-5rg82" Dec 05 11:23:11 crc kubenswrapper[4728]: I1205 11:23:11.688388 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-5rg82"] Dec 05 11:23:11 crc kubenswrapper[4728]: I1205 11:23:11.893728 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-5rg82" event={"ID":"2ce6b79a-c293-472b-90f8-7b56ce77b4cf","Type":"ContainerStarted","Data":"32d6da84e264c598061d076a3ba9e17bac9d46d1a301a6880f22c0fe80fe0000"} Dec 05 11:23:12 crc kubenswrapper[4728]: I1205 11:23:12.335810 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-b75mm" Dec 05 11:23:12 crc kubenswrapper[4728]: I1205 11:23:12.336104 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-b75mm" Dec 05 11:23:12 crc kubenswrapper[4728]: I1205 11:23:12.385518 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-b75mm" Dec 05 11:23:12 crc kubenswrapper[4728]: I1205 11:23:12.939233 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-b75mm" Dec 05 11:23:13 crc kubenswrapper[4728]: I1205 11:23:13.194505 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-rww9w"] Dec 05 11:23:13 crc kubenswrapper[4728]: I1205 11:23:13.195553 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rww9w" Dec 05 11:23:13 crc kubenswrapper[4728]: I1205 11:23:13.219330 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rww9w"] Dec 05 11:23:13 crc kubenswrapper[4728]: I1205 11:23:13.352681 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/43dbdd93-72c2-48b2-8829-7557303354be-catalog-content\") pod \"certified-operators-rww9w\" (UID: \"43dbdd93-72c2-48b2-8829-7557303354be\") " pod="openshift-marketplace/certified-operators-rww9w" Dec 05 11:23:13 crc kubenswrapper[4728]: I1205 11:23:13.352758 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8hmhv\" (UniqueName: \"kubernetes.io/projected/43dbdd93-72c2-48b2-8829-7557303354be-kube-api-access-8hmhv\") pod \"certified-operators-rww9w\" (UID: \"43dbdd93-72c2-48b2-8829-7557303354be\") " pod="openshift-marketplace/certified-operators-rww9w" Dec 05 11:23:13 crc kubenswrapper[4728]: I1205 11:23:13.352784 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/43dbdd93-72c2-48b2-8829-7557303354be-utilities\") pod \"certified-operators-rww9w\" (UID: \"43dbdd93-72c2-48b2-8829-7557303354be\") " pod="openshift-marketplace/certified-operators-rww9w" Dec 05 11:23:13 crc kubenswrapper[4728]: I1205 11:23:13.455020 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/43dbdd93-72c2-48b2-8829-7557303354be-catalog-content\") pod \"certified-operators-rww9w\" (UID: \"43dbdd93-72c2-48b2-8829-7557303354be\") " pod="openshift-marketplace/certified-operators-rww9w" Dec 05 11:23:13 crc 
kubenswrapper[4728]: I1205 11:23:13.455150 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8hmhv\" (UniqueName: \"kubernetes.io/projected/43dbdd93-72c2-48b2-8829-7557303354be-kube-api-access-8hmhv\") pod \"certified-operators-rww9w\" (UID: \"43dbdd93-72c2-48b2-8829-7557303354be\") " pod="openshift-marketplace/certified-operators-rww9w" Dec 05 11:23:13 crc kubenswrapper[4728]: I1205 11:23:13.455179 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/43dbdd93-72c2-48b2-8829-7557303354be-utilities\") pod \"certified-operators-rww9w\" (UID: \"43dbdd93-72c2-48b2-8829-7557303354be\") " pod="openshift-marketplace/certified-operators-rww9w" Dec 05 11:23:13 crc kubenswrapper[4728]: I1205 11:23:13.455602 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/43dbdd93-72c2-48b2-8829-7557303354be-catalog-content\") pod \"certified-operators-rww9w\" (UID: \"43dbdd93-72c2-48b2-8829-7557303354be\") " pod="openshift-marketplace/certified-operators-rww9w" Dec 05 11:23:13 crc kubenswrapper[4728]: I1205 11:23:13.455913 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/43dbdd93-72c2-48b2-8829-7557303354be-utilities\") pod \"certified-operators-rww9w\" (UID: \"43dbdd93-72c2-48b2-8829-7557303354be\") " pod="openshift-marketplace/certified-operators-rww9w" Dec 05 11:23:13 crc kubenswrapper[4728]: I1205 11:23:13.481763 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8hmhv\" (UniqueName: \"kubernetes.io/projected/43dbdd93-72c2-48b2-8829-7557303354be-kube-api-access-8hmhv\") pod \"certified-operators-rww9w\" (UID: \"43dbdd93-72c2-48b2-8829-7557303354be\") " pod="openshift-marketplace/certified-operators-rww9w" Dec 05 11:23:13 crc kubenswrapper[4728]: I1205 11:23:13.522886 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rww9w" Dec 05 11:23:13 crc kubenswrapper[4728]: I1205 11:23:13.947523 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rww9w"] Dec 05 11:23:13 crc kubenswrapper[4728]: I1205 11:23:13.994924 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-tmgk2"] Dec 05 11:23:13 crc kubenswrapper[4728]: I1205 11:23:13.996247 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tmgk2" Dec 05 11:23:14 crc kubenswrapper[4728]: I1205 11:23:14.064382 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tmgk2"] Dec 05 11:23:14 crc kubenswrapper[4728]: I1205 11:23:14.163204 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2znc8\" (UniqueName: \"kubernetes.io/projected/b9733c4e-7cc6-45e6-9c3a-b4ca13d58e95-kube-api-access-2znc8\") pod \"redhat-marketplace-tmgk2\" (UID: \"b9733c4e-7cc6-45e6-9c3a-b4ca13d58e95\") " pod="openshift-marketplace/redhat-marketplace-tmgk2" Dec 05 11:23:14 crc kubenswrapper[4728]: I1205 11:23:14.163248 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b9733c4e-7cc6-45e6-9c3a-b4ca13d58e95-catalog-content\") pod \"redhat-marketplace-tmgk2\" (UID: \"b9733c4e-7cc6-45e6-9c3a-b4ca13d58e95\") " pod="openshift-marketplace/redhat-marketplace-tmgk2" Dec 05 11:23:14 crc kubenswrapper[4728]: I1205 11:23:14.163401 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b9733c4e-7cc6-45e6-9c3a-b4ca13d58e95-utilities\") pod \"redhat-marketplace-tmgk2\" (UID: \"b9733c4e-7cc6-45e6-9c3a-b4ca13d58e95\") " pod="openshift-marketplace/redhat-marketplace-tmgk2" Dec 05 11:23:14 crc kubenswrapper[4728]: I1205 11:23:14.264532 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b9733c4e-7cc6-45e6-9c3a-b4ca13d58e95-utilities\") pod \"redhat-marketplace-tmgk2\" (UID: \"b9733c4e-7cc6-45e6-9c3a-b4ca13d58e95\") " pod="openshift-marketplace/redhat-marketplace-tmgk2" Dec 05 11:23:14 crc kubenswrapper[4728]: I1205 11:23:14.264603 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2znc8\" (UniqueName: \"kubernetes.io/projected/b9733c4e-7cc6-45e6-9c3a-b4ca13d58e95-kube-api-access-2znc8\") pod \"redhat-marketplace-tmgk2\" (UID: \"b9733c4e-7cc6-45e6-9c3a-b4ca13d58e95\") " pod="openshift-marketplace/redhat-marketplace-tmgk2" Dec 05 11:23:14 crc kubenswrapper[4728]: I1205 11:23:14.264624 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b9733c4e-7cc6-45e6-9c3a-b4ca13d58e95-catalog-content\") pod \"redhat-marketplace-tmgk2\" (UID: \"b9733c4e-7cc6-45e6-9c3a-b4ca13d58e95\") " pod="openshift-marketplace/redhat-marketplace-tmgk2" Dec 05 11:23:14 crc kubenswrapper[4728]: I1205 11:23:14.265122 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b9733c4e-7cc6-45e6-9c3a-b4ca13d58e95-catalog-content\") pod \"redhat-marketplace-tmgk2\" (UID: \"b9733c4e-7cc6-45e6-9c3a-b4ca13d58e95\") " pod="openshift-marketplace/redhat-marketplace-tmgk2" Dec 05 11:23:14 crc kubenswrapper[4728]: I1205 11:23:14.265223 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b9733c4e-7cc6-45e6-9c3a-b4ca13d58e95-utilities\") pod \"redhat-marketplace-tmgk2\" (UID: \"b9733c4e-7cc6-45e6-9c3a-b4ca13d58e95\") " pod="openshift-marketplace/redhat-marketplace-tmgk2" Dec 05 11:23:14 crc kubenswrapper[4728]: I1205 11:23:14.285119 4728 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-2znc8\" (UniqueName: \"kubernetes.io/projected/b9733c4e-7cc6-45e6-9c3a-b4ca13d58e95-kube-api-access-2znc8\") pod \"redhat-marketplace-tmgk2\" (UID: \"b9733c4e-7cc6-45e6-9c3a-b4ca13d58e95\") " pod="openshift-marketplace/redhat-marketplace-tmgk2" Dec 05 11:23:14 crc kubenswrapper[4728]: I1205 11:23:14.334726 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tmgk2" Dec 05 11:23:14 crc kubenswrapper[4728]: I1205 11:23:14.572955 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tmgk2"] Dec 05 11:23:14 crc kubenswrapper[4728]: W1205 11:23:14.579670 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb9733c4e_7cc6_45e6_9c3a_b4ca13d58e95.slice/crio-6d58beb700e2028a2bbb7428810ac98ce9f1d9a3450a1c994e116a27cc6c2fbb WatchSource:0}: Error finding container 6d58beb700e2028a2bbb7428810ac98ce9f1d9a3450a1c994e116a27cc6c2fbb: Status 404 returned error can't find the container with id 6d58beb700e2028a2bbb7428810ac98ce9f1d9a3450a1c994e116a27cc6c2fbb Dec 05 11:23:14 crc kubenswrapper[4728]: I1205 11:23:14.919970 4728 generic.go:334] "Generic (PLEG): container finished" podID="b9733c4e-7cc6-45e6-9c3a-b4ca13d58e95" containerID="357a349bcb91f8c192f6e10ed47a62e9f941e5ca3e04630c05f4d98cf2613472" exitCode=0 Dec 05 11:23:14 crc kubenswrapper[4728]: I1205 11:23:14.920071 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tmgk2" event={"ID":"b9733c4e-7cc6-45e6-9c3a-b4ca13d58e95","Type":"ContainerDied","Data":"357a349bcb91f8c192f6e10ed47a62e9f941e5ca3e04630c05f4d98cf2613472"} Dec 05 11:23:14 crc kubenswrapper[4728]: I1205 11:23:14.920118 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tmgk2" event={"ID":"b9733c4e-7cc6-45e6-9c3a-b4ca13d58e95","Type":"ContainerStarted","Data":"6d58beb700e2028a2bbb7428810ac98ce9f1d9a3450a1c994e116a27cc6c2fbb"} Dec 05 11:23:14 crc kubenswrapper[4728]: I1205 11:23:14.923350 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-5rg82" event={"ID":"2ce6b79a-c293-472b-90f8-7b56ce77b4cf","Type":"ContainerStarted","Data":"6231eae29b0e1a7f6d37590f0a3f958703535ef8b43d5e05638fbdb29784e365"} Dec 05 11:23:14 crc kubenswrapper[4728]: I1205 11:23:14.928749 4728 generic.go:334] "Generic (PLEG): container finished" podID="43dbdd93-72c2-48b2-8829-7557303354be" containerID="0458bc827b7ed2bed99db509da5b20b28e2382bb882fe9368ef564da72481d28" exitCode=0 Dec 05 11:23:14 crc kubenswrapper[4728]: I1205 11:23:14.928811 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rww9w" event={"ID":"43dbdd93-72c2-48b2-8829-7557303354be","Type":"ContainerDied","Data":"0458bc827b7ed2bed99db509da5b20b28e2382bb882fe9368ef564da72481d28"} Dec 05 11:23:14 crc kubenswrapper[4728]: I1205 11:23:14.928855 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rww9w" event={"ID":"43dbdd93-72c2-48b2-8829-7557303354be","Type":"ContainerStarted","Data":"7610c12fd2417f49e1c03afe53e965679d7a02763b822dd957456f5c032da172"} Dec 05 11:23:14 crc kubenswrapper[4728]: I1205 11:23:14.960280 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-5rg82" podStartSLOduration=1.897443761 
podStartE2EDuration="3.960259505s" podCreationTimestamp="2025-12-05 11:23:11 +0000 UTC" firstStartedPulling="2025-12-05 11:23:11.697617212 +0000 UTC m=+925.839739905" lastFinishedPulling="2025-12-05 11:23:13.760432966 +0000 UTC m=+927.902555649" observedRunningTime="2025-12-05 11:23:14.956316646 +0000 UTC m=+929.098439359" watchObservedRunningTime="2025-12-05 11:23:14.960259505 +0000 UTC m=+929.102382198" Dec 05 11:23:15 crc kubenswrapper[4728]: I1205 11:23:15.938487 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tmgk2" event={"ID":"b9733c4e-7cc6-45e6-9c3a-b4ca13d58e95","Type":"ContainerStarted","Data":"cb732e060e26cc3fd3a4514487260fe8e24e0d303cbbbcb2e3c6beae518c78ee"} Dec 05 11:23:15 crc kubenswrapper[4728]: I1205 11:23:15.941041 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rww9w" event={"ID":"43dbdd93-72c2-48b2-8829-7557303354be","Type":"ContainerStarted","Data":"4cefeb15262676b3aeb8276110c61ee2653cb234f9c45da4fe85198acaea49e5"} Dec 05 11:23:17 crc kubenswrapper[4728]: I1205 11:23:17.785008 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-b75mm"] Dec 05 11:23:17 crc kubenswrapper[4728]: I1205 11:23:17.785491 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-b75mm" podUID="595cb777-40d8-45d7-a9f6-572de2198cb7" containerName="registry-server" containerID="cri-o://7d28dd0d795e4b9f79539882d1653968c89c0d07f65b091b47e467e0aee0c718" gracePeriod=2 Dec 05 11:23:17 crc kubenswrapper[4728]: I1205 11:23:17.956187 4728 generic.go:334] "Generic (PLEG): container finished" podID="595cb777-40d8-45d7-a9f6-572de2198cb7" containerID="7d28dd0d795e4b9f79539882d1653968c89c0d07f65b091b47e467e0aee0c718" exitCode=0 Dec 05 11:23:17 crc kubenswrapper[4728]: I1205 11:23:17.956567 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b75mm" event={"ID":"595cb777-40d8-45d7-a9f6-572de2198cb7","Type":"ContainerDied","Data":"7d28dd0d795e4b9f79539882d1653968c89c0d07f65b091b47e467e0aee0c718"} Dec 05 11:23:17 crc kubenswrapper[4728]: I1205 11:23:17.957722 4728 generic.go:334] "Generic (PLEG): container finished" podID="b9733c4e-7cc6-45e6-9c3a-b4ca13d58e95" containerID="cb732e060e26cc3fd3a4514487260fe8e24e0d303cbbbcb2e3c6beae518c78ee" exitCode=0 Dec 05 11:23:17 crc kubenswrapper[4728]: I1205 11:23:17.957757 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tmgk2" event={"ID":"b9733c4e-7cc6-45e6-9c3a-b4ca13d58e95","Type":"ContainerDied","Data":"cb732e060e26cc3fd3a4514487260fe8e24e0d303cbbbcb2e3c6beae518c78ee"} Dec 05 11:23:17 crc kubenswrapper[4728]: I1205 11:23:17.962942 4728 generic.go:334] "Generic (PLEG): container finished" podID="43dbdd93-72c2-48b2-8829-7557303354be" containerID="4cefeb15262676b3aeb8276110c61ee2653cb234f9c45da4fe85198acaea49e5" exitCode=0 Dec 05 11:23:17 crc kubenswrapper[4728]: I1205 11:23:17.962988 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rww9w" event={"ID":"43dbdd93-72c2-48b2-8829-7557303354be","Type":"ContainerDied","Data":"4cefeb15262676b3aeb8276110c61ee2653cb234f9c45da4fe85198acaea49e5"} Dec 05 11:23:18 crc kubenswrapper[4728]: I1205 11:23:18.155172 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-b75mm" Dec 05 11:23:18 crc kubenswrapper[4728]: I1205 11:23:18.325340 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/595cb777-40d8-45d7-a9f6-572de2198cb7-utilities\") pod \"595cb777-40d8-45d7-a9f6-572de2198cb7\" (UID: \"595cb777-40d8-45d7-a9f6-572de2198cb7\") " Dec 05 11:23:18 crc kubenswrapper[4728]: I1205 11:23:18.325381 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wqdcw\" (UniqueName: \"kubernetes.io/projected/595cb777-40d8-45d7-a9f6-572de2198cb7-kube-api-access-wqdcw\") pod \"595cb777-40d8-45d7-a9f6-572de2198cb7\" (UID: \"595cb777-40d8-45d7-a9f6-572de2198cb7\") " Dec 05 11:23:18 crc kubenswrapper[4728]: I1205 11:23:18.325422 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/595cb777-40d8-45d7-a9f6-572de2198cb7-catalog-content\") pod \"595cb777-40d8-45d7-a9f6-572de2198cb7\" (UID: \"595cb777-40d8-45d7-a9f6-572de2198cb7\") " Dec 05 11:23:18 crc kubenswrapper[4728]: I1205 11:23:18.326747 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/595cb777-40d8-45d7-a9f6-572de2198cb7-utilities" (OuterVolumeSpecName: "utilities") pod "595cb777-40d8-45d7-a9f6-572de2198cb7" (UID: "595cb777-40d8-45d7-a9f6-572de2198cb7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:23:18 crc kubenswrapper[4728]: I1205 11:23:18.331943 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/595cb777-40d8-45d7-a9f6-572de2198cb7-kube-api-access-wqdcw" (OuterVolumeSpecName: "kube-api-access-wqdcw") pod "595cb777-40d8-45d7-a9f6-572de2198cb7" (UID: "595cb777-40d8-45d7-a9f6-572de2198cb7"). InnerVolumeSpecName "kube-api-access-wqdcw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:23:18 crc kubenswrapper[4728]: I1205 11:23:18.427358 4728 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/595cb777-40d8-45d7-a9f6-572de2198cb7-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 11:23:18 crc kubenswrapper[4728]: I1205 11:23:18.427393 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wqdcw\" (UniqueName: \"kubernetes.io/projected/595cb777-40d8-45d7-a9f6-572de2198cb7-kube-api-access-wqdcw\") on node \"crc\" DevicePath \"\"" Dec 05 11:23:18 crc kubenswrapper[4728]: I1205 11:23:18.456355 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/595cb777-40d8-45d7-a9f6-572de2198cb7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "595cb777-40d8-45d7-a9f6-572de2198cb7" (UID: "595cb777-40d8-45d7-a9f6-572de2198cb7"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:23:18 crc kubenswrapper[4728]: I1205 11:23:18.528840 4728 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/595cb777-40d8-45d7-a9f6-572de2198cb7-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 11:23:18 crc kubenswrapper[4728]: I1205 11:23:18.970196 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tmgk2" event={"ID":"b9733c4e-7cc6-45e6-9c3a-b4ca13d58e95","Type":"ContainerStarted","Data":"aeeb8aa4418c25dd9c874040d501ae72d5e0def34582a15e9bd95791fe55b579"} Dec 05 11:23:18 crc kubenswrapper[4728]: I1205 11:23:18.972910 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rww9w" event={"ID":"43dbdd93-72c2-48b2-8829-7557303354be","Type":"ContainerStarted","Data":"7ffde46742be307b2fef562f8dd59f56ea8fbd377f0fee667d709e36e70e82ae"} Dec 05 11:23:18 crc kubenswrapper[4728]: I1205 11:23:18.983612 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b75mm" event={"ID":"595cb777-40d8-45d7-a9f6-572de2198cb7","Type":"ContainerDied","Data":"72591aa1beb3fd56ef4268bef72614097fdde6ef1c2bf6ab628f1bcf2b938820"} Dec 05 11:23:18 crc kubenswrapper[4728]: I1205 11:23:18.983670 4728 scope.go:117] "RemoveContainer" containerID="7d28dd0d795e4b9f79539882d1653968c89c0d07f65b091b47e467e0aee0c718" Dec 05 11:23:18 crc kubenswrapper[4728]: I1205 11:23:18.983863 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-b75mm" Dec 05 11:23:19 crc kubenswrapper[4728]: I1205 11:23:19.000067 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-tmgk2" podStartSLOduration=2.252891287 podStartE2EDuration="6.000045783s" podCreationTimestamp="2025-12-05 11:23:13 +0000 UTC" firstStartedPulling="2025-12-05 11:23:14.922487268 +0000 UTC m=+929.064609961" lastFinishedPulling="2025-12-05 11:23:18.669641744 +0000 UTC m=+932.811764457" observedRunningTime="2025-12-05 11:23:18.996198697 +0000 UTC m=+933.138321400" watchObservedRunningTime="2025-12-05 11:23:19.000045783 +0000 UTC m=+933.142168486" Dec 05 11:23:19 crc kubenswrapper[4728]: I1205 11:23:19.014434 4728 scope.go:117] "RemoveContainer" containerID="551ccbb4b7c8cb1d513b015023317394be3fb2d95bab0ef191f5d7d7134ec502" Dec 05 11:23:19 crc kubenswrapper[4728]: I1205 11:23:19.021901 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-rww9w" podStartSLOduration=2.362705325 podStartE2EDuration="6.021882303s" podCreationTimestamp="2025-12-05 11:23:13 +0000 UTC" firstStartedPulling="2025-12-05 11:23:14.930452347 +0000 UTC m=+929.072575040" lastFinishedPulling="2025-12-05 11:23:18.589629295 +0000 UTC m=+932.731752018" observedRunningTime="2025-12-05 11:23:19.017293357 +0000 UTC m=+933.159416050" watchObservedRunningTime="2025-12-05 11:23:19.021882303 +0000 UTC m=+933.164005016" Dec 05 11:23:19 crc kubenswrapper[4728]: I1205 11:23:19.044405 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-b75mm"] Dec 05 11:23:19 crc kubenswrapper[4728]: I1205 11:23:19.047454 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-b75mm"] Dec 05 11:23:19 crc kubenswrapper[4728]: I1205 11:23:19.052879 4728 scope.go:117] "RemoveContainer" 
containerID="35caa6d2d10482090b001c57e7a035690464b0d9e507d68f51030f6fc6555fb0" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.329724 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-6jmjp"] Dec 05 11:23:20 crc kubenswrapper[4728]: E1205 11:23:20.330240 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="595cb777-40d8-45d7-a9f6-572de2198cb7" containerName="extract-content" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.330252 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="595cb777-40d8-45d7-a9f6-572de2198cb7" containerName="extract-content" Dec 05 11:23:20 crc kubenswrapper[4728]: E1205 11:23:20.330266 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="595cb777-40d8-45d7-a9f6-572de2198cb7" containerName="registry-server" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.330272 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="595cb777-40d8-45d7-a9f6-572de2198cb7" containerName="registry-server" Dec 05 11:23:20 crc kubenswrapper[4728]: E1205 11:23:20.330279 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="595cb777-40d8-45d7-a9f6-572de2198cb7" containerName="extract-utilities" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.330287 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="595cb777-40d8-45d7-a9f6-572de2198cb7" containerName="extract-utilities" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.330386 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="595cb777-40d8-45d7-a9f6-572de2198cb7" containerName="registry-server" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.330979 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-6jmjp" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.332557 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-jxq7b" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.341021 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-6jmjp"] Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.348359 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-7xz5t"] Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.349363 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-srn58\" (UniqueName: \"kubernetes.io/projected/c560c0c5-c5bd-41b6-a77c-b8ff9452b7e6-kube-api-access-srn58\") pod \"nmstate-metrics-7f946cbc9-6jmjp\" (UID: \"c560c0c5-c5bd-41b6-a77c-b8ff9452b7e6\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-6jmjp" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.349490 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-7xz5t" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.351583 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.370007 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="595cb777-40d8-45d7-a9f6-572de2198cb7" path="/var/lib/kubelet/pods/595cb777-40d8-45d7-a9f6-572de2198cb7/volumes" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.380350 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-7xz5t"] Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.394687 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-x9g4q"] Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.395567 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-x9g4q" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.450624 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-knwfr\" (UniqueName: \"kubernetes.io/projected/522d4b95-dda7-40b8-960e-f19f1b147c41-kube-api-access-knwfr\") pod \"nmstate-handler-x9g4q\" (UID: \"522d4b95-dda7-40b8-960e-f19f1b147c41\") " pod="openshift-nmstate/nmstate-handler-x9g4q" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.450679 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-srn58\" (UniqueName: \"kubernetes.io/projected/c560c0c5-c5bd-41b6-a77c-b8ff9452b7e6-kube-api-access-srn58\") pod \"nmstate-metrics-7f946cbc9-6jmjp\" (UID: \"c560c0c5-c5bd-41b6-a77c-b8ff9452b7e6\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-6jmjp" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.450706 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/7c2cda78-4bb1-416c-8762-8c1618a755ad-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-7xz5t\" (UID: \"7c2cda78-4bb1-416c-8762-8c1618a755ad\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-7xz5t" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.450738 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qbpdf\" (UniqueName: \"kubernetes.io/projected/7c2cda78-4bb1-416c-8762-8c1618a755ad-kube-api-access-qbpdf\") pod \"nmstate-webhook-5f6d4c5ccb-7xz5t\" (UID: \"7c2cda78-4bb1-416c-8762-8c1618a755ad\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-7xz5t" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.450764 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/522d4b95-dda7-40b8-960e-f19f1b147c41-ovs-socket\") pod \"nmstate-handler-x9g4q\" (UID: \"522d4b95-dda7-40b8-960e-f19f1b147c41\") " pod="openshift-nmstate/nmstate-handler-x9g4q" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.450973 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/522d4b95-dda7-40b8-960e-f19f1b147c41-nmstate-lock\") pod \"nmstate-handler-x9g4q\" (UID: \"522d4b95-dda7-40b8-960e-f19f1b147c41\") " pod="openshift-nmstate/nmstate-handler-x9g4q" Dec 05 11:23:20 crc kubenswrapper[4728]: 
I1205 11:23:20.451074 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/522d4b95-dda7-40b8-960e-f19f1b147c41-dbus-socket\") pod \"nmstate-handler-x9g4q\" (UID: \"522d4b95-dda7-40b8-960e-f19f1b147c41\") " pod="openshift-nmstate/nmstate-handler-x9g4q" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.466711 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-gxx2h"] Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.467443 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-gxx2h" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.469649 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.469657 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.470347 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-jdfsh" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.480107 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-srn58\" (UniqueName: \"kubernetes.io/projected/c560c0c5-c5bd-41b6-a77c-b8ff9452b7e6-kube-api-access-srn58\") pod \"nmstate-metrics-7f946cbc9-6jmjp\" (UID: \"c560c0c5-c5bd-41b6-a77c-b8ff9452b7e6\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-6jmjp" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.514563 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-gxx2h"] Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.551911 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/522d4b95-dda7-40b8-960e-f19f1b147c41-nmstate-lock\") pod \"nmstate-handler-x9g4q\" (UID: \"522d4b95-dda7-40b8-960e-f19f1b147c41\") " pod="openshift-nmstate/nmstate-handler-x9g4q" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.552248 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/522d4b95-dda7-40b8-960e-f19f1b147c41-dbus-socket\") pod \"nmstate-handler-x9g4q\" (UID: \"522d4b95-dda7-40b8-960e-f19f1b147c41\") " pod="openshift-nmstate/nmstate-handler-x9g4q" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.552279 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cbht7\" (UniqueName: \"kubernetes.io/projected/f97e7e26-99e0-403f-a6d5-5aa008101459-kube-api-access-cbht7\") pod \"nmstate-console-plugin-7fbb5f6569-gxx2h\" (UID: \"f97e7e26-99e0-403f-a6d5-5aa008101459\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-gxx2h" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.552002 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/522d4b95-dda7-40b8-960e-f19f1b147c41-nmstate-lock\") pod \"nmstate-handler-x9g4q\" (UID: \"522d4b95-dda7-40b8-960e-f19f1b147c41\") " pod="openshift-nmstate/nmstate-handler-x9g4q" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.552306 4728 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"kube-api-access-knwfr\" (UniqueName: \"kubernetes.io/projected/522d4b95-dda7-40b8-960e-f19f1b147c41-kube-api-access-knwfr\") pod \"nmstate-handler-x9g4q\" (UID: \"522d4b95-dda7-40b8-960e-f19f1b147c41\") " pod="openshift-nmstate/nmstate-handler-x9g4q" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.552542 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/f97e7e26-99e0-403f-a6d5-5aa008101459-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-gxx2h\" (UID: \"f97e7e26-99e0-403f-a6d5-5aa008101459\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-gxx2h" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.552576 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/7c2cda78-4bb1-416c-8762-8c1618a755ad-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-7xz5t\" (UID: \"7c2cda78-4bb1-416c-8762-8c1618a755ad\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-7xz5t" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.552549 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/522d4b95-dda7-40b8-960e-f19f1b147c41-dbus-socket\") pod \"nmstate-handler-x9g4q\" (UID: \"522d4b95-dda7-40b8-960e-f19f1b147c41\") " pod="openshift-nmstate/nmstate-handler-x9g4q" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.552639 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qbpdf\" (UniqueName: \"kubernetes.io/projected/7c2cda78-4bb1-416c-8762-8c1618a755ad-kube-api-access-qbpdf\") pod \"nmstate-webhook-5f6d4c5ccb-7xz5t\" (UID: \"7c2cda78-4bb1-416c-8762-8c1618a755ad\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-7xz5t" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.552679 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/f97e7e26-99e0-403f-a6d5-5aa008101459-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-gxx2h\" (UID: \"f97e7e26-99e0-403f-a6d5-5aa008101459\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-gxx2h" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.552704 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/522d4b95-dda7-40b8-960e-f19f1b147c41-ovs-socket\") pod \"nmstate-handler-x9g4q\" (UID: \"522d4b95-dda7-40b8-960e-f19f1b147c41\") " pod="openshift-nmstate/nmstate-handler-x9g4q" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.552866 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/522d4b95-dda7-40b8-960e-f19f1b147c41-ovs-socket\") pod \"nmstate-handler-x9g4q\" (UID: \"522d4b95-dda7-40b8-960e-f19f1b147c41\") " pod="openshift-nmstate/nmstate-handler-x9g4q" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.557683 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/7c2cda78-4bb1-416c-8762-8c1618a755ad-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-7xz5t\" (UID: \"7c2cda78-4bb1-416c-8762-8c1618a755ad\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-7xz5t" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.573736 4728 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-knwfr\" (UniqueName: \"kubernetes.io/projected/522d4b95-dda7-40b8-960e-f19f1b147c41-kube-api-access-knwfr\") pod \"nmstate-handler-x9g4q\" (UID: \"522d4b95-dda7-40b8-960e-f19f1b147c41\") " pod="openshift-nmstate/nmstate-handler-x9g4q" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.576461 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qbpdf\" (UniqueName: \"kubernetes.io/projected/7c2cda78-4bb1-416c-8762-8c1618a755ad-kube-api-access-qbpdf\") pod \"nmstate-webhook-5f6d4c5ccb-7xz5t\" (UID: \"7c2cda78-4bb1-416c-8762-8c1618a755ad\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-7xz5t" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.652959 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-6jmjp" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.653689 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cbht7\" (UniqueName: \"kubernetes.io/projected/f97e7e26-99e0-403f-a6d5-5aa008101459-kube-api-access-cbht7\") pod \"nmstate-console-plugin-7fbb5f6569-gxx2h\" (UID: \"f97e7e26-99e0-403f-a6d5-5aa008101459\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-gxx2h" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.653715 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/f97e7e26-99e0-403f-a6d5-5aa008101459-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-gxx2h\" (UID: \"f97e7e26-99e0-403f-a6d5-5aa008101459\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-gxx2h" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.653745 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/f97e7e26-99e0-403f-a6d5-5aa008101459-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-gxx2h\" (UID: \"f97e7e26-99e0-403f-a6d5-5aa008101459\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-gxx2h" Dec 05 11:23:20 crc kubenswrapper[4728]: E1205 11:23:20.653888 4728 secret.go:188] Couldn't get secret openshift-nmstate/plugin-serving-cert: secret "plugin-serving-cert" not found Dec 05 11:23:20 crc kubenswrapper[4728]: E1205 11:23:20.653931 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f97e7e26-99e0-403f-a6d5-5aa008101459-plugin-serving-cert podName:f97e7e26-99e0-403f-a6d5-5aa008101459 nodeName:}" failed. No retries permitted until 2025-12-05 11:23:21.153915119 +0000 UTC m=+935.296037812 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "plugin-serving-cert" (UniqueName: "kubernetes.io/secret/f97e7e26-99e0-403f-a6d5-5aa008101459-plugin-serving-cert") pod "nmstate-console-plugin-7fbb5f6569-gxx2h" (UID: "f97e7e26-99e0-403f-a6d5-5aa008101459") : secret "plugin-serving-cert" not found Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.654910 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/f97e7e26-99e0-403f-a6d5-5aa008101459-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-gxx2h\" (UID: \"f97e7e26-99e0-403f-a6d5-5aa008101459\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-gxx2h" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.655535 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-7c9dd58c6b-n9cfq"] Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.656465 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-7c9dd58c6b-n9cfq" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.669714 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-7c9dd58c6b-n9cfq"] Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.677886 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-7xz5t" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.698339 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cbht7\" (UniqueName: \"kubernetes.io/projected/f97e7e26-99e0-403f-a6d5-5aa008101459-kube-api-access-cbht7\") pod \"nmstate-console-plugin-7fbb5f6569-gxx2h\" (UID: \"f97e7e26-99e0-403f-a6d5-5aa008101459\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-gxx2h" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.715261 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-x9g4q" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.858226 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/853c972b-0ce8-4f03-95d7-58ddfd65e874-console-config\") pod \"console-7c9dd58c6b-n9cfq\" (UID: \"853c972b-0ce8-4f03-95d7-58ddfd65e874\") " pod="openshift-console/console-7c9dd58c6b-n9cfq" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.859112 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cz5sc\" (UniqueName: \"kubernetes.io/projected/853c972b-0ce8-4f03-95d7-58ddfd65e874-kube-api-access-cz5sc\") pod \"console-7c9dd58c6b-n9cfq\" (UID: \"853c972b-0ce8-4f03-95d7-58ddfd65e874\") " pod="openshift-console/console-7c9dd58c6b-n9cfq" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.859191 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/853c972b-0ce8-4f03-95d7-58ddfd65e874-oauth-serving-cert\") pod \"console-7c9dd58c6b-n9cfq\" (UID: \"853c972b-0ce8-4f03-95d7-58ddfd65e874\") " pod="openshift-console/console-7c9dd58c6b-n9cfq" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.859222 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/853c972b-0ce8-4f03-95d7-58ddfd65e874-console-serving-cert\") pod \"console-7c9dd58c6b-n9cfq\" (UID: \"853c972b-0ce8-4f03-95d7-58ddfd65e874\") " pod="openshift-console/console-7c9dd58c6b-n9cfq" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.859278 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/853c972b-0ce8-4f03-95d7-58ddfd65e874-console-oauth-config\") pod \"console-7c9dd58c6b-n9cfq\" (UID: \"853c972b-0ce8-4f03-95d7-58ddfd65e874\") " pod="openshift-console/console-7c9dd58c6b-n9cfq" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.859363 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/853c972b-0ce8-4f03-95d7-58ddfd65e874-trusted-ca-bundle\") pod \"console-7c9dd58c6b-n9cfq\" (UID: \"853c972b-0ce8-4f03-95d7-58ddfd65e874\") " pod="openshift-console/console-7c9dd58c6b-n9cfq" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.859424 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/853c972b-0ce8-4f03-95d7-58ddfd65e874-service-ca\") pod \"console-7c9dd58c6b-n9cfq\" (UID: \"853c972b-0ce8-4f03-95d7-58ddfd65e874\") " pod="openshift-console/console-7c9dd58c6b-n9cfq" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.860726 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-6jmjp"] Dec 05 11:23:20 crc kubenswrapper[4728]: W1205 11:23:20.868319 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc560c0c5_c5bd_41b6_a77c_b8ff9452b7e6.slice/crio-2b2c0fb2c2da9cbeb3fdb42e334dbbbfc5c481a0d26234c100b6f30891dd50cf WatchSource:0}: Error finding container 2b2c0fb2c2da9cbeb3fdb42e334dbbbfc5c481a0d26234c100b6f30891dd50cf: 
Status 404 returned error can't find the container with id 2b2c0fb2c2da9cbeb3fdb42e334dbbbfc5c481a0d26234c100b6f30891dd50cf Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.913994 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-7xz5t"] Dec 05 11:23:20 crc kubenswrapper[4728]: W1205 11:23:20.920269 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7c2cda78_4bb1_416c_8762_8c1618a755ad.slice/crio-bc8b7e5647435bf8688a393d41f1c06eadea042f2418fe153252739551627fe7 WatchSource:0}: Error finding container bc8b7e5647435bf8688a393d41f1c06eadea042f2418fe153252739551627fe7: Status 404 returned error can't find the container with id bc8b7e5647435bf8688a393d41f1c06eadea042f2418fe153252739551627fe7 Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.960448 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/853c972b-0ce8-4f03-95d7-58ddfd65e874-console-config\") pod \"console-7c9dd58c6b-n9cfq\" (UID: \"853c972b-0ce8-4f03-95d7-58ddfd65e874\") " pod="openshift-console/console-7c9dd58c6b-n9cfq" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.960506 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cz5sc\" (UniqueName: \"kubernetes.io/projected/853c972b-0ce8-4f03-95d7-58ddfd65e874-kube-api-access-cz5sc\") pod \"console-7c9dd58c6b-n9cfq\" (UID: \"853c972b-0ce8-4f03-95d7-58ddfd65e874\") " pod="openshift-console/console-7c9dd58c6b-n9cfq" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.960539 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/853c972b-0ce8-4f03-95d7-58ddfd65e874-oauth-serving-cert\") pod \"console-7c9dd58c6b-n9cfq\" (UID: \"853c972b-0ce8-4f03-95d7-58ddfd65e874\") " pod="openshift-console/console-7c9dd58c6b-n9cfq" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.960557 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/853c972b-0ce8-4f03-95d7-58ddfd65e874-console-serving-cert\") pod \"console-7c9dd58c6b-n9cfq\" (UID: \"853c972b-0ce8-4f03-95d7-58ddfd65e874\") " pod="openshift-console/console-7c9dd58c6b-n9cfq" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.960588 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/853c972b-0ce8-4f03-95d7-58ddfd65e874-console-oauth-config\") pod \"console-7c9dd58c6b-n9cfq\" (UID: \"853c972b-0ce8-4f03-95d7-58ddfd65e874\") " pod="openshift-console/console-7c9dd58c6b-n9cfq" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.960610 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/853c972b-0ce8-4f03-95d7-58ddfd65e874-trusted-ca-bundle\") pod \"console-7c9dd58c6b-n9cfq\" (UID: \"853c972b-0ce8-4f03-95d7-58ddfd65e874\") " pod="openshift-console/console-7c9dd58c6b-n9cfq" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.960631 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/853c972b-0ce8-4f03-95d7-58ddfd65e874-service-ca\") pod \"console-7c9dd58c6b-n9cfq\" (UID: \"853c972b-0ce8-4f03-95d7-58ddfd65e874\") " 
pod="openshift-console/console-7c9dd58c6b-n9cfq" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.961458 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/853c972b-0ce8-4f03-95d7-58ddfd65e874-console-config\") pod \"console-7c9dd58c6b-n9cfq\" (UID: \"853c972b-0ce8-4f03-95d7-58ddfd65e874\") " pod="openshift-console/console-7c9dd58c6b-n9cfq" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.961485 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/853c972b-0ce8-4f03-95d7-58ddfd65e874-service-ca\") pod \"console-7c9dd58c6b-n9cfq\" (UID: \"853c972b-0ce8-4f03-95d7-58ddfd65e874\") " pod="openshift-console/console-7c9dd58c6b-n9cfq" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.961496 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/853c972b-0ce8-4f03-95d7-58ddfd65e874-oauth-serving-cert\") pod \"console-7c9dd58c6b-n9cfq\" (UID: \"853c972b-0ce8-4f03-95d7-58ddfd65e874\") " pod="openshift-console/console-7c9dd58c6b-n9cfq" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.961867 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/853c972b-0ce8-4f03-95d7-58ddfd65e874-trusted-ca-bundle\") pod \"console-7c9dd58c6b-n9cfq\" (UID: \"853c972b-0ce8-4f03-95d7-58ddfd65e874\") " pod="openshift-console/console-7c9dd58c6b-n9cfq" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.964782 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/853c972b-0ce8-4f03-95d7-58ddfd65e874-console-oauth-config\") pod \"console-7c9dd58c6b-n9cfq\" (UID: \"853c972b-0ce8-4f03-95d7-58ddfd65e874\") " pod="openshift-console/console-7c9dd58c6b-n9cfq" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.964811 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/853c972b-0ce8-4f03-95d7-58ddfd65e874-console-serving-cert\") pod \"console-7c9dd58c6b-n9cfq\" (UID: \"853c972b-0ce8-4f03-95d7-58ddfd65e874\") " pod="openshift-console/console-7c9dd58c6b-n9cfq" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.976872 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cz5sc\" (UniqueName: \"kubernetes.io/projected/853c972b-0ce8-4f03-95d7-58ddfd65e874-kube-api-access-cz5sc\") pod \"console-7c9dd58c6b-n9cfq\" (UID: \"853c972b-0ce8-4f03-95d7-58ddfd65e874\") " pod="openshift-console/console-7c9dd58c6b-n9cfq" Dec 05 11:23:20 crc kubenswrapper[4728]: I1205 11:23:20.978299 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-7c9dd58c6b-n9cfq" Dec 05 11:23:21 crc kubenswrapper[4728]: I1205 11:23:21.002133 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-x9g4q" event={"ID":"522d4b95-dda7-40b8-960e-f19f1b147c41","Type":"ContainerStarted","Data":"a5225230be5df1bd110fa5158b552a8b4fe638e86231ad5b4996aa3b24d68555"} Dec 05 11:23:21 crc kubenswrapper[4728]: I1205 11:23:21.004043 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-7xz5t" event={"ID":"7c2cda78-4bb1-416c-8762-8c1618a755ad","Type":"ContainerStarted","Data":"bc8b7e5647435bf8688a393d41f1c06eadea042f2418fe153252739551627fe7"} Dec 05 11:23:21 crc kubenswrapper[4728]: I1205 11:23:21.009318 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-6jmjp" event={"ID":"c560c0c5-c5bd-41b6-a77c-b8ff9452b7e6","Type":"ContainerStarted","Data":"2b2c0fb2c2da9cbeb3fdb42e334dbbbfc5c481a0d26234c100b6f30891dd50cf"} Dec 05 11:23:21 crc kubenswrapper[4728]: I1205 11:23:21.164553 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-7c9dd58c6b-n9cfq"] Dec 05 11:23:21 crc kubenswrapper[4728]: I1205 11:23:21.164807 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/f97e7e26-99e0-403f-a6d5-5aa008101459-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-gxx2h\" (UID: \"f97e7e26-99e0-403f-a6d5-5aa008101459\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-gxx2h" Dec 05 11:23:21 crc kubenswrapper[4728]: I1205 11:23:21.169308 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/f97e7e26-99e0-403f-a6d5-5aa008101459-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-gxx2h\" (UID: \"f97e7e26-99e0-403f-a6d5-5aa008101459\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-gxx2h" Dec 05 11:23:21 crc kubenswrapper[4728]: I1205 11:23:21.439624 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-gxx2h" Dec 05 11:23:21 crc kubenswrapper[4728]: I1205 11:23:21.629290 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-gxx2h"] Dec 05 11:23:21 crc kubenswrapper[4728]: W1205 11:23:21.633193 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf97e7e26_99e0_403f_a6d5_5aa008101459.slice/crio-cf83491199c1ce4c551e19d0a8abae21de4150d7d7782842d06fe0e001024b4a WatchSource:0}: Error finding container cf83491199c1ce4c551e19d0a8abae21de4150d7d7782842d06fe0e001024b4a: Status 404 returned error can't find the container with id cf83491199c1ce4c551e19d0a8abae21de4150d7d7782842d06fe0e001024b4a Dec 05 11:23:22 crc kubenswrapper[4728]: I1205 11:23:22.017194 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-7c9dd58c6b-n9cfq" event={"ID":"853c972b-0ce8-4f03-95d7-58ddfd65e874","Type":"ContainerStarted","Data":"823d1ca845f4e78e93219b14c3a501164a3e78b1f44356aa5ecdba478e6e9961"} Dec 05 11:23:22 crc kubenswrapper[4728]: I1205 11:23:22.017252 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-7c9dd58c6b-n9cfq" event={"ID":"853c972b-0ce8-4f03-95d7-58ddfd65e874","Type":"ContainerStarted","Data":"aafe60c7e17b3916f8dd5ba4139cf33c717d0808951f30d70faa2087bb9b0275"} Dec 05 11:23:22 crc kubenswrapper[4728]: I1205 11:23:22.019211 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-gxx2h" event={"ID":"f97e7e26-99e0-403f-a6d5-5aa008101459","Type":"ContainerStarted","Data":"cf83491199c1ce4c551e19d0a8abae21de4150d7d7782842d06fe0e001024b4a"} Dec 05 11:23:22 crc kubenswrapper[4728]: I1205 11:23:22.044054 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-7c9dd58c6b-n9cfq" podStartSLOduration=2.044035029 podStartE2EDuration="2.044035029s" podCreationTimestamp="2025-12-05 11:23:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:23:22.042948389 +0000 UTC m=+936.185071092" watchObservedRunningTime="2025-12-05 11:23:22.044035029 +0000 UTC m=+936.186157722" Dec 05 11:23:23 crc kubenswrapper[4728]: I1205 11:23:23.523192 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-rww9w" Dec 05 11:23:23 crc kubenswrapper[4728]: I1205 11:23:23.523494 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-rww9w" Dec 05 11:23:23 crc kubenswrapper[4728]: I1205 11:23:23.570663 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-rww9w" Dec 05 11:23:24 crc kubenswrapper[4728]: I1205 11:23:24.034743 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-6jmjp" event={"ID":"c560c0c5-c5bd-41b6-a77c-b8ff9452b7e6","Type":"ContainerStarted","Data":"6bb2318ff4737b5d5f2103bbc7d2885635f4123a6d43c19aaf1550e876a97650"} Dec 05 11:23:24 crc kubenswrapper[4728]: I1205 11:23:24.091760 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-rww9w" Dec 05 11:23:24 crc kubenswrapper[4728]: I1205 11:23:24.335809 4728 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-tmgk2" Dec 05 11:23:24 crc kubenswrapper[4728]: I1205 11:23:24.335862 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-tmgk2" Dec 05 11:23:24 crc kubenswrapper[4728]: I1205 11:23:24.384675 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rww9w"] Dec 05 11:23:24 crc kubenswrapper[4728]: I1205 11:23:24.391185 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-tmgk2" Dec 05 11:23:25 crc kubenswrapper[4728]: I1205 11:23:25.044858 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-x9g4q" event={"ID":"522d4b95-dda7-40b8-960e-f19f1b147c41","Type":"ContainerStarted","Data":"0a2509a24bc2dff7bdea987f8557b3af90a11163c16d9caa81bd3fedd67cb1e3"} Dec 05 11:23:25 crc kubenswrapper[4728]: I1205 11:23:25.044979 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-x9g4q" Dec 05 11:23:25 crc kubenswrapper[4728]: I1205 11:23:25.046766 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-gxx2h" event={"ID":"f97e7e26-99e0-403f-a6d5-5aa008101459","Type":"ContainerStarted","Data":"27f41c70356d8ed16530878c3291795b3f520a8d912f65fd5956dfa50c58b263"} Dec 05 11:23:25 crc kubenswrapper[4728]: I1205 11:23:25.064393 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-x9g4q" podStartSLOduration=2.001754817 podStartE2EDuration="5.064363093s" podCreationTimestamp="2025-12-05 11:23:20 +0000 UTC" firstStartedPulling="2025-12-05 11:23:20.759495141 +0000 UTC m=+934.901617834" lastFinishedPulling="2025-12-05 11:23:23.822103387 +0000 UTC m=+937.964226110" observedRunningTime="2025-12-05 11:23:25.063874089 +0000 UTC m=+939.205996812" watchObservedRunningTime="2025-12-05 11:23:25.064363093 +0000 UTC m=+939.206485796" Dec 05 11:23:25 crc kubenswrapper[4728]: I1205 11:23:25.092389 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-gxx2h" podStartSLOduration=1.999647318 podStartE2EDuration="5.092363922s" podCreationTimestamp="2025-12-05 11:23:20 +0000 UTC" firstStartedPulling="2025-12-05 11:23:21.635160683 +0000 UTC m=+935.777283386" lastFinishedPulling="2025-12-05 11:23:24.727877297 +0000 UTC m=+938.869999990" observedRunningTime="2025-12-05 11:23:25.084101865 +0000 UTC m=+939.226224588" watchObservedRunningTime="2025-12-05 11:23:25.092363922 +0000 UTC m=+939.234486615" Dec 05 11:23:25 crc kubenswrapper[4728]: I1205 11:23:25.106727 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-tmgk2" Dec 05 11:23:26 crc kubenswrapper[4728]: I1205 11:23:26.051429 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-rww9w" podUID="43dbdd93-72c2-48b2-8829-7557303354be" containerName="registry-server" containerID="cri-o://7ffde46742be307b2fef562f8dd59f56ea8fbd377f0fee667d709e36e70e82ae" gracePeriod=2 Dec 05 11:23:26 crc kubenswrapper[4728]: I1205 11:23:26.785936 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-tmgk2"] Dec 05 11:23:26 crc kubenswrapper[4728]: I1205 11:23:26.908876 4728 util.go:48] "No ready 
sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rww9w" Dec 05 11:23:27 crc kubenswrapper[4728]: I1205 11:23:27.046157 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/43dbdd93-72c2-48b2-8829-7557303354be-utilities\") pod \"43dbdd93-72c2-48b2-8829-7557303354be\" (UID: \"43dbdd93-72c2-48b2-8829-7557303354be\") " Dec 05 11:23:27 crc kubenswrapper[4728]: I1205 11:23:27.046378 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8hmhv\" (UniqueName: \"kubernetes.io/projected/43dbdd93-72c2-48b2-8829-7557303354be-kube-api-access-8hmhv\") pod \"43dbdd93-72c2-48b2-8829-7557303354be\" (UID: \"43dbdd93-72c2-48b2-8829-7557303354be\") " Dec 05 11:23:27 crc kubenswrapper[4728]: I1205 11:23:27.046532 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/43dbdd93-72c2-48b2-8829-7557303354be-catalog-content\") pod \"43dbdd93-72c2-48b2-8829-7557303354be\" (UID: \"43dbdd93-72c2-48b2-8829-7557303354be\") " Dec 05 11:23:27 crc kubenswrapper[4728]: I1205 11:23:27.047124 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/43dbdd93-72c2-48b2-8829-7557303354be-utilities" (OuterVolumeSpecName: "utilities") pod "43dbdd93-72c2-48b2-8829-7557303354be" (UID: "43dbdd93-72c2-48b2-8829-7557303354be"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:23:27 crc kubenswrapper[4728]: I1205 11:23:27.053954 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43dbdd93-72c2-48b2-8829-7557303354be-kube-api-access-8hmhv" (OuterVolumeSpecName: "kube-api-access-8hmhv") pod "43dbdd93-72c2-48b2-8829-7557303354be" (UID: "43dbdd93-72c2-48b2-8829-7557303354be"). InnerVolumeSpecName "kube-api-access-8hmhv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:23:27 crc kubenswrapper[4728]: I1205 11:23:27.059485 4728 generic.go:334] "Generic (PLEG): container finished" podID="43dbdd93-72c2-48b2-8829-7557303354be" containerID="7ffde46742be307b2fef562f8dd59f56ea8fbd377f0fee667d709e36e70e82ae" exitCode=0 Dec 05 11:23:27 crc kubenswrapper[4728]: I1205 11:23:27.059529 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rww9w" event={"ID":"43dbdd93-72c2-48b2-8829-7557303354be","Type":"ContainerDied","Data":"7ffde46742be307b2fef562f8dd59f56ea8fbd377f0fee667d709e36e70e82ae"} Dec 05 11:23:27 crc kubenswrapper[4728]: I1205 11:23:27.059771 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rww9w" event={"ID":"43dbdd93-72c2-48b2-8829-7557303354be","Type":"ContainerDied","Data":"7610c12fd2417f49e1c03afe53e965679d7a02763b822dd957456f5c032da172"} Dec 05 11:23:27 crc kubenswrapper[4728]: I1205 11:23:27.059547 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-rww9w" Dec 05 11:23:27 crc kubenswrapper[4728]: I1205 11:23:27.059838 4728 scope.go:117] "RemoveContainer" containerID="7ffde46742be307b2fef562f8dd59f56ea8fbd377f0fee667d709e36e70e82ae" Dec 05 11:23:27 crc kubenswrapper[4728]: I1205 11:23:27.060750 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-7xz5t" event={"ID":"7c2cda78-4bb1-416c-8762-8c1618a755ad","Type":"ContainerStarted","Data":"b86dc7e88a7d44a7fd58a97cf00afb7396f75ea68842dc460241980dab56371e"} Dec 05 11:23:27 crc kubenswrapper[4728]: I1205 11:23:27.065960 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-6jmjp" event={"ID":"c560c0c5-c5bd-41b6-a77c-b8ff9452b7e6","Type":"ContainerStarted","Data":"288c8128c06d9f3065a4208bb09491c859128b32b109fe3d13fde96a26382230"} Dec 05 11:23:27 crc kubenswrapper[4728]: I1205 11:23:27.066306 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-tmgk2" podUID="b9733c4e-7cc6-45e6-9c3a-b4ca13d58e95" containerName="registry-server" containerID="cri-o://aeeb8aa4418c25dd9c874040d501ae72d5e0def34582a15e9bd95791fe55b579" gracePeriod=2 Dec 05 11:23:27 crc kubenswrapper[4728]: I1205 11:23:27.080385 4728 scope.go:117] "RemoveContainer" containerID="4cefeb15262676b3aeb8276110c61ee2653cb234f9c45da4fe85198acaea49e5" Dec 05 11:23:27 crc kubenswrapper[4728]: I1205 11:23:27.102268 4728 scope.go:117] "RemoveContainer" containerID="0458bc827b7ed2bed99db509da5b20b28e2382bb882fe9368ef564da72481d28" Dec 05 11:23:27 crc kubenswrapper[4728]: I1205 11:23:27.117494 4728 scope.go:117] "RemoveContainer" containerID="7ffde46742be307b2fef562f8dd59f56ea8fbd377f0fee667d709e36e70e82ae" Dec 05 11:23:27 crc kubenswrapper[4728]: E1205 11:23:27.117994 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7ffde46742be307b2fef562f8dd59f56ea8fbd377f0fee667d709e36e70e82ae\": container with ID starting with 7ffde46742be307b2fef562f8dd59f56ea8fbd377f0fee667d709e36e70e82ae not found: ID does not exist" containerID="7ffde46742be307b2fef562f8dd59f56ea8fbd377f0fee667d709e36e70e82ae" Dec 05 11:23:27 crc kubenswrapper[4728]: I1205 11:23:27.118021 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ffde46742be307b2fef562f8dd59f56ea8fbd377f0fee667d709e36e70e82ae"} err="failed to get container status \"7ffde46742be307b2fef562f8dd59f56ea8fbd377f0fee667d709e36e70e82ae\": rpc error: code = NotFound desc = could not find container \"7ffde46742be307b2fef562f8dd59f56ea8fbd377f0fee667d709e36e70e82ae\": container with ID starting with 7ffde46742be307b2fef562f8dd59f56ea8fbd377f0fee667d709e36e70e82ae not found: ID does not exist" Dec 05 11:23:27 crc kubenswrapper[4728]: I1205 11:23:27.118040 4728 scope.go:117] "RemoveContainer" containerID="4cefeb15262676b3aeb8276110c61ee2653cb234f9c45da4fe85198acaea49e5" Dec 05 11:23:27 crc kubenswrapper[4728]: E1205 11:23:27.118317 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4cefeb15262676b3aeb8276110c61ee2653cb234f9c45da4fe85198acaea49e5\": container with ID starting with 4cefeb15262676b3aeb8276110c61ee2653cb234f9c45da4fe85198acaea49e5 not found: ID does not exist" containerID="4cefeb15262676b3aeb8276110c61ee2653cb234f9c45da4fe85198acaea49e5" Dec 05 11:23:27 crc 
kubenswrapper[4728]: I1205 11:23:27.118388 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4cefeb15262676b3aeb8276110c61ee2653cb234f9c45da4fe85198acaea49e5"} err="failed to get container status \"4cefeb15262676b3aeb8276110c61ee2653cb234f9c45da4fe85198acaea49e5\": rpc error: code = NotFound desc = could not find container \"4cefeb15262676b3aeb8276110c61ee2653cb234f9c45da4fe85198acaea49e5\": container with ID starting with 4cefeb15262676b3aeb8276110c61ee2653cb234f9c45da4fe85198acaea49e5 not found: ID does not exist" Dec 05 11:23:27 crc kubenswrapper[4728]: I1205 11:23:27.118446 4728 scope.go:117] "RemoveContainer" containerID="0458bc827b7ed2bed99db509da5b20b28e2382bb882fe9368ef564da72481d28" Dec 05 11:23:27 crc kubenswrapper[4728]: E1205 11:23:27.118813 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0458bc827b7ed2bed99db509da5b20b28e2382bb882fe9368ef564da72481d28\": container with ID starting with 0458bc827b7ed2bed99db509da5b20b28e2382bb882fe9368ef564da72481d28 not found: ID does not exist" containerID="0458bc827b7ed2bed99db509da5b20b28e2382bb882fe9368ef564da72481d28" Dec 05 11:23:27 crc kubenswrapper[4728]: I1205 11:23:27.118841 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0458bc827b7ed2bed99db509da5b20b28e2382bb882fe9368ef564da72481d28"} err="failed to get container status \"0458bc827b7ed2bed99db509da5b20b28e2382bb882fe9368ef564da72481d28\": rpc error: code = NotFound desc = could not find container \"0458bc827b7ed2bed99db509da5b20b28e2382bb882fe9368ef564da72481d28\": container with ID starting with 0458bc827b7ed2bed99db509da5b20b28e2382bb882fe9368ef564da72481d28 not found: ID does not exist" Dec 05 11:23:27 crc kubenswrapper[4728]: I1205 11:23:27.147495 4728 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/43dbdd93-72c2-48b2-8829-7557303354be-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 11:23:27 crc kubenswrapper[4728]: I1205 11:23:27.147578 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8hmhv\" (UniqueName: \"kubernetes.io/projected/43dbdd93-72c2-48b2-8829-7557303354be-kube-api-access-8hmhv\") on node \"crc\" DevicePath \"\"" Dec 05 11:23:27 crc kubenswrapper[4728]: I1205 11:23:27.544728 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/43dbdd93-72c2-48b2-8829-7557303354be-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "43dbdd93-72c2-48b2-8829-7557303354be" (UID: "43dbdd93-72c2-48b2-8829-7557303354be"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:23:27 crc kubenswrapper[4728]: I1205 11:23:27.552605 4728 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/43dbdd93-72c2-48b2-8829-7557303354be-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 11:23:27 crc kubenswrapper[4728]: I1205 11:23:27.720028 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rww9w"] Dec 05 11:23:27 crc kubenswrapper[4728]: I1205 11:23:27.726060 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-rww9w"] Dec 05 11:23:27 crc kubenswrapper[4728]: I1205 11:23:27.993158 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tmgk2" Dec 05 11:23:28 crc kubenswrapper[4728]: I1205 11:23:28.072800 4728 generic.go:334] "Generic (PLEG): container finished" podID="b9733c4e-7cc6-45e6-9c3a-b4ca13d58e95" containerID="aeeb8aa4418c25dd9c874040d501ae72d5e0def34582a15e9bd95791fe55b579" exitCode=0 Dec 05 11:23:28 crc kubenswrapper[4728]: I1205 11:23:28.072829 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tmgk2" event={"ID":"b9733c4e-7cc6-45e6-9c3a-b4ca13d58e95","Type":"ContainerDied","Data":"aeeb8aa4418c25dd9c874040d501ae72d5e0def34582a15e9bd95791fe55b579"} Dec 05 11:23:28 crc kubenswrapper[4728]: I1205 11:23:28.072861 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tmgk2" Dec 05 11:23:28 crc kubenswrapper[4728]: I1205 11:23:28.072874 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tmgk2" event={"ID":"b9733c4e-7cc6-45e6-9c3a-b4ca13d58e95","Type":"ContainerDied","Data":"6d58beb700e2028a2bbb7428810ac98ce9f1d9a3450a1c994e116a27cc6c2fbb"} Dec 05 11:23:28 crc kubenswrapper[4728]: I1205 11:23:28.072894 4728 scope.go:117] "RemoveContainer" containerID="aeeb8aa4418c25dd9c874040d501ae72d5e0def34582a15e9bd95791fe55b579" Dec 05 11:23:28 crc kubenswrapper[4728]: I1205 11:23:28.073840 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-7xz5t" Dec 05 11:23:28 crc kubenswrapper[4728]: I1205 11:23:28.088156 4728 scope.go:117] "RemoveContainer" containerID="cb732e060e26cc3fd3a4514487260fe8e24e0d303cbbbcb2e3c6beae518c78ee" Dec 05 11:23:28 crc kubenswrapper[4728]: I1205 11:23:28.109855 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-7xz5t" podStartSLOduration=2.284481366 podStartE2EDuration="8.109832118s" podCreationTimestamp="2025-12-05 11:23:20 +0000 UTC" firstStartedPulling="2025-12-05 11:23:20.921668197 +0000 UTC m=+935.063790890" lastFinishedPulling="2025-12-05 11:23:26.747018939 +0000 UTC m=+940.889141642" observedRunningTime="2025-12-05 11:23:28.104502712 +0000 UTC m=+942.246625425" watchObservedRunningTime="2025-12-05 11:23:28.109832118 +0000 UTC m=+942.251954831" Dec 05 11:23:28 crc kubenswrapper[4728]: I1205 11:23:28.119250 4728 scope.go:117] "RemoveContainer" containerID="357a349bcb91f8c192f6e10ed47a62e9f941e5ca3e04630c05f4d98cf2613472" Dec 05 11:23:28 crc kubenswrapper[4728]: I1205 11:23:28.126143 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-6jmjp" podStartSLOduration=2.253076762 podStartE2EDuration="8.126123406s" podCreationTimestamp="2025-12-05 11:23:20 +0000 UTC" firstStartedPulling="2025-12-05 11:23:20.870661325 +0000 UTC m=+935.012784018" lastFinishedPulling="2025-12-05 11:23:26.743707929 +0000 UTC m=+940.885830662" observedRunningTime="2025-12-05 11:23:28.120716277 +0000 UTC m=+942.262838970" watchObservedRunningTime="2025-12-05 11:23:28.126123406 +0000 UTC m=+942.268246099" Dec 05 11:23:28 crc kubenswrapper[4728]: I1205 11:23:28.146266 4728 scope.go:117] "RemoveContainer" containerID="aeeb8aa4418c25dd9c874040d501ae72d5e0def34582a15e9bd95791fe55b579" Dec 05 11:23:28 crc kubenswrapper[4728]: E1205 11:23:28.149498 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"aeeb8aa4418c25dd9c874040d501ae72d5e0def34582a15e9bd95791fe55b579\": container with ID starting with aeeb8aa4418c25dd9c874040d501ae72d5e0def34582a15e9bd95791fe55b579 not found: ID does not exist" containerID="aeeb8aa4418c25dd9c874040d501ae72d5e0def34582a15e9bd95791fe55b579" Dec 05 11:23:28 crc kubenswrapper[4728]: I1205 11:23:28.149605 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aeeb8aa4418c25dd9c874040d501ae72d5e0def34582a15e9bd95791fe55b579"} err="failed to get container status \"aeeb8aa4418c25dd9c874040d501ae72d5e0def34582a15e9bd95791fe55b579\": rpc error: code = NotFound desc = could not find container \"aeeb8aa4418c25dd9c874040d501ae72d5e0def34582a15e9bd95791fe55b579\": container with ID starting with aeeb8aa4418c25dd9c874040d501ae72d5e0def34582a15e9bd95791fe55b579 not found: ID does not exist" Dec 05 11:23:28 crc kubenswrapper[4728]: I1205 11:23:28.149656 4728 scope.go:117] "RemoveContainer" containerID="cb732e060e26cc3fd3a4514487260fe8e24e0d303cbbbcb2e3c6beae518c78ee" Dec 05 11:23:28 crc kubenswrapper[4728]: E1205 11:23:28.150146 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cb732e060e26cc3fd3a4514487260fe8e24e0d303cbbbcb2e3c6beae518c78ee\": container with ID starting with cb732e060e26cc3fd3a4514487260fe8e24e0d303cbbbcb2e3c6beae518c78ee not found: ID does not exist" containerID="cb732e060e26cc3fd3a4514487260fe8e24e0d303cbbbcb2e3c6beae518c78ee" Dec 05 11:23:28 crc kubenswrapper[4728]: I1205 11:23:28.150221 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb732e060e26cc3fd3a4514487260fe8e24e0d303cbbbcb2e3c6beae518c78ee"} err="failed to get container status \"cb732e060e26cc3fd3a4514487260fe8e24e0d303cbbbcb2e3c6beae518c78ee\": rpc error: code = NotFound desc = could not find container \"cb732e060e26cc3fd3a4514487260fe8e24e0d303cbbbcb2e3c6beae518c78ee\": container with ID starting with cb732e060e26cc3fd3a4514487260fe8e24e0d303cbbbcb2e3c6beae518c78ee not found: ID does not exist" Dec 05 11:23:28 crc kubenswrapper[4728]: I1205 11:23:28.150266 4728 scope.go:117] "RemoveContainer" containerID="357a349bcb91f8c192f6e10ed47a62e9f941e5ca3e04630c05f4d98cf2613472" Dec 05 11:23:28 crc kubenswrapper[4728]: E1205 11:23:28.150682 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"357a349bcb91f8c192f6e10ed47a62e9f941e5ca3e04630c05f4d98cf2613472\": container with ID starting with 357a349bcb91f8c192f6e10ed47a62e9f941e5ca3e04630c05f4d98cf2613472 not found: ID does not exist" containerID="357a349bcb91f8c192f6e10ed47a62e9f941e5ca3e04630c05f4d98cf2613472" Dec 05 11:23:28 crc kubenswrapper[4728]: I1205 11:23:28.150730 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"357a349bcb91f8c192f6e10ed47a62e9f941e5ca3e04630c05f4d98cf2613472"} err="failed to get container status \"357a349bcb91f8c192f6e10ed47a62e9f941e5ca3e04630c05f4d98cf2613472\": rpc error: code = NotFound desc = could not find container \"357a349bcb91f8c192f6e10ed47a62e9f941e5ca3e04630c05f4d98cf2613472\": container with ID starting with 357a349bcb91f8c192f6e10ed47a62e9f941e5ca3e04630c05f4d98cf2613472 not found: ID does not exist" Dec 05 11:23:28 crc kubenswrapper[4728]: I1205 11:23:28.160477 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/b9733c4e-7cc6-45e6-9c3a-b4ca13d58e95-utilities\") pod \"b9733c4e-7cc6-45e6-9c3a-b4ca13d58e95\" (UID: \"b9733c4e-7cc6-45e6-9c3a-b4ca13d58e95\") " Dec 05 11:23:28 crc kubenswrapper[4728]: I1205 11:23:28.160608 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2znc8\" (UniqueName: \"kubernetes.io/projected/b9733c4e-7cc6-45e6-9c3a-b4ca13d58e95-kube-api-access-2znc8\") pod \"b9733c4e-7cc6-45e6-9c3a-b4ca13d58e95\" (UID: \"b9733c4e-7cc6-45e6-9c3a-b4ca13d58e95\") " Dec 05 11:23:28 crc kubenswrapper[4728]: I1205 11:23:28.160651 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b9733c4e-7cc6-45e6-9c3a-b4ca13d58e95-catalog-content\") pod \"b9733c4e-7cc6-45e6-9c3a-b4ca13d58e95\" (UID: \"b9733c4e-7cc6-45e6-9c3a-b4ca13d58e95\") " Dec 05 11:23:28 crc kubenswrapper[4728]: I1205 11:23:28.162639 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b9733c4e-7cc6-45e6-9c3a-b4ca13d58e95-utilities" (OuterVolumeSpecName: "utilities") pod "b9733c4e-7cc6-45e6-9c3a-b4ca13d58e95" (UID: "b9733c4e-7cc6-45e6-9c3a-b4ca13d58e95"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:23:28 crc kubenswrapper[4728]: I1205 11:23:28.165588 4728 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b9733c4e-7cc6-45e6-9c3a-b4ca13d58e95-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 11:23:28 crc kubenswrapper[4728]: I1205 11:23:28.167662 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9733c4e-7cc6-45e6-9c3a-b4ca13d58e95-kube-api-access-2znc8" (OuterVolumeSpecName: "kube-api-access-2znc8") pod "b9733c4e-7cc6-45e6-9c3a-b4ca13d58e95" (UID: "b9733c4e-7cc6-45e6-9c3a-b4ca13d58e95"). InnerVolumeSpecName "kube-api-access-2znc8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:23:28 crc kubenswrapper[4728]: I1205 11:23:28.179372 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b9733c4e-7cc6-45e6-9c3a-b4ca13d58e95-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b9733c4e-7cc6-45e6-9c3a-b4ca13d58e95" (UID: "b9733c4e-7cc6-45e6-9c3a-b4ca13d58e95"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:23:28 crc kubenswrapper[4728]: I1205 11:23:28.266501 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2znc8\" (UniqueName: \"kubernetes.io/projected/b9733c4e-7cc6-45e6-9c3a-b4ca13d58e95-kube-api-access-2znc8\") on node \"crc\" DevicePath \"\"" Dec 05 11:23:28 crc kubenswrapper[4728]: I1205 11:23:28.266536 4728 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b9733c4e-7cc6-45e6-9c3a-b4ca13d58e95-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 11:23:28 crc kubenswrapper[4728]: I1205 11:23:28.360273 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43dbdd93-72c2-48b2-8829-7557303354be" path="/var/lib/kubelet/pods/43dbdd93-72c2-48b2-8829-7557303354be/volumes" Dec 05 11:23:28 crc kubenswrapper[4728]: I1205 11:23:28.388400 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-tmgk2"] Dec 05 11:23:28 crc kubenswrapper[4728]: I1205 11:23:28.396570 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-tmgk2"] Dec 05 11:23:30 crc kubenswrapper[4728]: I1205 11:23:30.364517 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b9733c4e-7cc6-45e6-9c3a-b4ca13d58e95" path="/var/lib/kubelet/pods/b9733c4e-7cc6-45e6-9c3a-b4ca13d58e95/volumes" Dec 05 11:23:30 crc kubenswrapper[4728]: I1205 11:23:30.752044 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-x9g4q" Dec 05 11:23:30 crc kubenswrapper[4728]: I1205 11:23:30.979259 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-7c9dd58c6b-n9cfq" Dec 05 11:23:30 crc kubenswrapper[4728]: I1205 11:23:30.979317 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-7c9dd58c6b-n9cfq" Dec 05 11:23:30 crc kubenswrapper[4728]: I1205 11:23:30.985605 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-7c9dd58c6b-n9cfq" Dec 05 11:23:31 crc kubenswrapper[4728]: I1205 11:23:31.099455 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-7c9dd58c6b-n9cfq" Dec 05 11:23:31 crc kubenswrapper[4728]: I1205 11:23:31.157885 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-f6bjc"] Dec 05 11:23:40 crc kubenswrapper[4728]: I1205 11:23:40.686185 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-7xz5t" Dec 05 11:23:53 crc kubenswrapper[4728]: I1205 11:23:53.689579 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83hnsxg"] Dec 05 11:23:53 crc kubenswrapper[4728]: E1205 11:23:53.690599 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43dbdd93-72c2-48b2-8829-7557303354be" containerName="extract-content" Dec 05 11:23:53 crc kubenswrapper[4728]: I1205 11:23:53.690620 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="43dbdd93-72c2-48b2-8829-7557303354be" containerName="extract-content" Dec 05 11:23:53 crc kubenswrapper[4728]: E1205 11:23:53.690639 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43dbdd93-72c2-48b2-8829-7557303354be" containerName="extract-utilities" Dec 
05 11:23:53 crc kubenswrapper[4728]: I1205 11:23:53.690651 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="43dbdd93-72c2-48b2-8829-7557303354be" containerName="extract-utilities"
Dec 05 11:23:53 crc kubenswrapper[4728]: E1205 11:23:53.690668 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9733c4e-7cc6-45e6-9c3a-b4ca13d58e95" containerName="registry-server"
Dec 05 11:23:53 crc kubenswrapper[4728]: I1205 11:23:53.690684 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9733c4e-7cc6-45e6-9c3a-b4ca13d58e95" containerName="registry-server"
Dec 05 11:23:53 crc kubenswrapper[4728]: E1205 11:23:53.690715 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43dbdd93-72c2-48b2-8829-7557303354be" containerName="registry-server"
Dec 05 11:23:53 crc kubenswrapper[4728]: I1205 11:23:53.690727 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="43dbdd93-72c2-48b2-8829-7557303354be" containerName="registry-server"
Dec 05 11:23:53 crc kubenswrapper[4728]: E1205 11:23:53.690743 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9733c4e-7cc6-45e6-9c3a-b4ca13d58e95" containerName="extract-utilities"
Dec 05 11:23:53 crc kubenswrapper[4728]: I1205 11:23:53.690755 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9733c4e-7cc6-45e6-9c3a-b4ca13d58e95" containerName="extract-utilities"
Dec 05 11:23:53 crc kubenswrapper[4728]: E1205 11:23:53.690774 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9733c4e-7cc6-45e6-9c3a-b4ca13d58e95" containerName="extract-content"
Dec 05 11:23:53 crc kubenswrapper[4728]: I1205 11:23:53.690785 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9733c4e-7cc6-45e6-9c3a-b4ca13d58e95" containerName="extract-content"
Dec 05 11:23:53 crc kubenswrapper[4728]: I1205 11:23:53.690992 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="43dbdd93-72c2-48b2-8829-7557303354be" containerName="registry-server"
Dec 05 11:23:53 crc kubenswrapper[4728]: I1205 11:23:53.691013 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9733c4e-7cc6-45e6-9c3a-b4ca13d58e95" containerName="registry-server"
Dec 05 11:23:53 crc kubenswrapper[4728]: I1205 11:23:53.692522 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83hnsxg"
Dec 05 11:23:53 crc kubenswrapper[4728]: I1205 11:23:53.695011 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc"
Dec 05 11:23:53 crc kubenswrapper[4728]: I1205 11:23:53.699122 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83hnsxg"]
Dec 05 11:23:53 crc kubenswrapper[4728]: I1205 11:23:53.732033 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83hnsxg\" (UID: \"dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83hnsxg"
Dec 05 11:23:53 crc kubenswrapper[4728]: I1205 11:23:53.732145 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83hnsxg\" (UID: \"dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83hnsxg"
Dec 05 11:23:53 crc kubenswrapper[4728]: I1205 11:23:53.732206 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j9f8r\" (UniqueName: \"kubernetes.io/projected/dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f-kube-api-access-j9f8r\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83hnsxg\" (UID: \"dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83hnsxg"
Dec 05 11:23:53 crc kubenswrapper[4728]: I1205 11:23:53.833909 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83hnsxg\" (UID: \"dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83hnsxg"
Dec 05 11:23:53 crc kubenswrapper[4728]: I1205 11:23:53.834024 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83hnsxg\" (UID: \"dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83hnsxg"
Dec 05 11:23:53 crc kubenswrapper[4728]: I1205 11:23:53.834071 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j9f8r\" (UniqueName: \"kubernetes.io/projected/dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f-kube-api-access-j9f8r\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83hnsxg\" (UID: \"dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83hnsxg"
Dec 05 11:23:53 crc kubenswrapper[4728]: I1205 11:23:53.834489 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83hnsxg\" (UID: \"dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83hnsxg"
Dec 05 11:23:53 crc kubenswrapper[4728]: I1205 11:23:53.834736 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83hnsxg\" (UID: \"dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83hnsxg"
Dec 05 11:23:53 crc kubenswrapper[4728]: I1205 11:23:53.852518 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j9f8r\" (UniqueName: \"kubernetes.io/projected/dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f-kube-api-access-j9f8r\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83hnsxg\" (UID: \"dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83hnsxg"
Dec 05 11:23:54 crc kubenswrapper[4728]: I1205 11:23:54.017526 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83hnsxg"
Dec 05 11:23:54 crc kubenswrapper[4728]: I1205 11:23:54.459602 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83hnsxg"]
Dec 05 11:23:55 crc kubenswrapper[4728]: I1205 11:23:55.251962 4728 generic.go:334] "Generic (PLEG): container finished" podID="dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f" containerID="6411a0ed164ef47d0333dee163cdd2fdc0d56a44abe7ed82cc6c7f6f75f91788" exitCode=0
Dec 05 11:23:55 crc kubenswrapper[4728]: I1205 11:23:55.252067 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83hnsxg" event={"ID":"dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f","Type":"ContainerDied","Data":"6411a0ed164ef47d0333dee163cdd2fdc0d56a44abe7ed82cc6c7f6f75f91788"}
Dec 05 11:23:55 crc kubenswrapper[4728]: I1205 11:23:55.253266 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83hnsxg" event={"ID":"dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f","Type":"ContainerStarted","Data":"3a4793507e26544f4bc4d9919684890b5667151aa0a269111f4e1aca70504a58"}
Dec 05 11:23:55 crc kubenswrapper[4728]: I1205 11:23:55.702239 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 11:23:55 crc kubenswrapper[4728]: I1205 11:23:55.702315 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 11:23:56 crc kubenswrapper[4728]: I1205 11:23:56.214961 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-f6bjc" podUID="fda4dac4-0200-4740-a9c1-c3897809c2c0" containerName="console" containerID="cri-o://cfb91ae6e2ab001db5c34c90c9b66451fe8660e70c9175e87849763f87fbb0dd" gracePeriod=15
Dec 05 11:23:56 crc kubenswrapper[4728]: I1205 11:23:56.665059 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-f6bjc_fda4dac4-0200-4740-a9c1-c3897809c2c0/console/0.log"
Dec 05 11:23:56 crc kubenswrapper[4728]: I1205 11:23:56.665325 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-f6bjc"
Dec 05 11:23:56 crc kubenswrapper[4728]: I1205 11:23:56.771481 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s778f\" (UniqueName: \"kubernetes.io/projected/fda4dac4-0200-4740-a9c1-c3897809c2c0-kube-api-access-s778f\") pod \"fda4dac4-0200-4740-a9c1-c3897809c2c0\" (UID: \"fda4dac4-0200-4740-a9c1-c3897809c2c0\") "
Dec 05 11:23:56 crc kubenswrapper[4728]: I1205 11:23:56.771568 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/fda4dac4-0200-4740-a9c1-c3897809c2c0-oauth-serving-cert\") pod \"fda4dac4-0200-4740-a9c1-c3897809c2c0\" (UID: \"fda4dac4-0200-4740-a9c1-c3897809c2c0\") "
Dec 05 11:23:56 crc kubenswrapper[4728]: I1205 11:23:56.771606 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/fda4dac4-0200-4740-a9c1-c3897809c2c0-console-serving-cert\") pod \"fda4dac4-0200-4740-a9c1-c3897809c2c0\" (UID: \"fda4dac4-0200-4740-a9c1-c3897809c2c0\") "
Dec 05 11:23:56 crc kubenswrapper[4728]: I1205 11:23:56.771701 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/fda4dac4-0200-4740-a9c1-c3897809c2c0-console-oauth-config\") pod \"fda4dac4-0200-4740-a9c1-c3897809c2c0\" (UID: \"fda4dac4-0200-4740-a9c1-c3897809c2c0\") "
Dec 05 11:23:56 crc kubenswrapper[4728]: I1205 11:23:56.771758 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/fda4dac4-0200-4740-a9c1-c3897809c2c0-console-config\") pod \"fda4dac4-0200-4740-a9c1-c3897809c2c0\" (UID: \"fda4dac4-0200-4740-a9c1-c3897809c2c0\") "
Dec 05 11:23:56 crc kubenswrapper[4728]: I1205 11:23:56.771859 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/fda4dac4-0200-4740-a9c1-c3897809c2c0-service-ca\") pod \"fda4dac4-0200-4740-a9c1-c3897809c2c0\" (UID: \"fda4dac4-0200-4740-a9c1-c3897809c2c0\") "
Dec 05 11:23:56 crc kubenswrapper[4728]: I1205 11:23:56.771894 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fda4dac4-0200-4740-a9c1-c3897809c2c0-trusted-ca-bundle\") pod \"fda4dac4-0200-4740-a9c1-c3897809c2c0\" (UID: \"fda4dac4-0200-4740-a9c1-c3897809c2c0\") "
Dec 05 11:23:56 crc kubenswrapper[4728]: I1205 11:23:56.772533 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda4dac4-0200-4740-a9c1-c3897809c2c0-console-config" (OuterVolumeSpecName: "console-config") pod "fda4dac4-0200-4740-a9c1-c3897809c2c0" (UID: "fda4dac4-0200-4740-a9c1-c3897809c2c0"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:23:56 crc kubenswrapper[4728]: I1205 11:23:56.772543 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda4dac4-0200-4740-a9c1-c3897809c2c0-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "fda4dac4-0200-4740-a9c1-c3897809c2c0" (UID: "fda4dac4-0200-4740-a9c1-c3897809c2c0"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:23:56 crc kubenswrapper[4728]: I1205 11:23:56.772920 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda4dac4-0200-4740-a9c1-c3897809c2c0-service-ca" (OuterVolumeSpecName: "service-ca") pod "fda4dac4-0200-4740-a9c1-c3897809c2c0" (UID: "fda4dac4-0200-4740-a9c1-c3897809c2c0"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:23:56 crc kubenswrapper[4728]: I1205 11:23:56.773043 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda4dac4-0200-4740-a9c1-c3897809c2c0-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "fda4dac4-0200-4740-a9c1-c3897809c2c0" (UID: "fda4dac4-0200-4740-a9c1-c3897809c2c0"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:23:56 crc kubenswrapper[4728]: I1205 11:23:56.777088 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda4dac4-0200-4740-a9c1-c3897809c2c0-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "fda4dac4-0200-4740-a9c1-c3897809c2c0" (UID: "fda4dac4-0200-4740-a9c1-c3897809c2c0"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:23:56 crc kubenswrapper[4728]: I1205 11:23:56.777115 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda4dac4-0200-4740-a9c1-c3897809c2c0-kube-api-access-s778f" (OuterVolumeSpecName: "kube-api-access-s778f") pod "fda4dac4-0200-4740-a9c1-c3897809c2c0" (UID: "fda4dac4-0200-4740-a9c1-c3897809c2c0"). InnerVolumeSpecName "kube-api-access-s778f". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:23:56 crc kubenswrapper[4728]: I1205 11:23:56.777470 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda4dac4-0200-4740-a9c1-c3897809c2c0-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "fda4dac4-0200-4740-a9c1-c3897809c2c0" (UID: "fda4dac4-0200-4740-a9c1-c3897809c2c0"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:23:56 crc kubenswrapper[4728]: I1205 11:23:56.873017 4728 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/fda4dac4-0200-4740-a9c1-c3897809c2c0-console-config\") on node \"crc\" DevicePath \"\""
Dec 05 11:23:56 crc kubenswrapper[4728]: I1205 11:23:56.873047 4728 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/fda4dac4-0200-4740-a9c1-c3897809c2c0-service-ca\") on node \"crc\" DevicePath \"\""
Dec 05 11:23:56 crc kubenswrapper[4728]: I1205 11:23:56.873056 4728 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fda4dac4-0200-4740-a9c1-c3897809c2c0-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 11:23:56 crc kubenswrapper[4728]: I1205 11:23:56.873066 4728 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/fda4dac4-0200-4740-a9c1-c3897809c2c0-oauth-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 05 11:23:56 crc kubenswrapper[4728]: I1205 11:23:56.873076 4728 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/fda4dac4-0200-4740-a9c1-c3897809c2c0-console-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 05 11:23:56 crc kubenswrapper[4728]: I1205 11:23:56.873085 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s778f\" (UniqueName: \"kubernetes.io/projected/fda4dac4-0200-4740-a9c1-c3897809c2c0-kube-api-access-s778f\") on node \"crc\" DevicePath \"\""
Dec 05 11:23:56 crc kubenswrapper[4728]: I1205 11:23:56.873095 4728 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/fda4dac4-0200-4740-a9c1-c3897809c2c0-console-oauth-config\") on node \"crc\" DevicePath \"\""
Dec 05 11:23:57 crc kubenswrapper[4728]: I1205 11:23:57.263660 4728 generic.go:334] "Generic (PLEG): container finished" podID="dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f" containerID="2acb05cae5a9a55a108de6b7ce1d737e03dbfbbc257cc48c38a7aa9c35979e01" exitCode=0
Dec 05 11:23:57 crc kubenswrapper[4728]: I1205 11:23:57.263866 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83hnsxg" event={"ID":"dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f","Type":"ContainerDied","Data":"2acb05cae5a9a55a108de6b7ce1d737e03dbfbbc257cc48c38a7aa9c35979e01"}
Dec 05 11:23:57 crc kubenswrapper[4728]: I1205 11:23:57.266105 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-f6bjc_fda4dac4-0200-4740-a9c1-c3897809c2c0/console/0.log"
Dec 05 11:23:57 crc kubenswrapper[4728]: I1205 11:23:57.266169 4728 generic.go:334] "Generic (PLEG): container finished" podID="fda4dac4-0200-4740-a9c1-c3897809c2c0" containerID="cfb91ae6e2ab001db5c34c90c9b66451fe8660e70c9175e87849763f87fbb0dd" exitCode=2
Dec 05 11:23:57 crc kubenswrapper[4728]: I1205 11:23:57.266196 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-f6bjc" event={"ID":"fda4dac4-0200-4740-a9c1-c3897809c2c0","Type":"ContainerDied","Data":"cfb91ae6e2ab001db5c34c90c9b66451fe8660e70c9175e87849763f87fbb0dd"}
Dec 05 11:23:57 crc kubenswrapper[4728]: I1205 11:23:57.266220 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-f6bjc" event={"ID":"fda4dac4-0200-4740-a9c1-c3897809c2c0","Type":"ContainerDied","Data":"9ed360310291c085deb9e0eee4983d0af107074582cc1cca853c83e4bccaf740"}
Dec 05 11:23:57 crc kubenswrapper[4728]: I1205 11:23:57.266269 4728 scope.go:117] "RemoveContainer" containerID="cfb91ae6e2ab001db5c34c90c9b66451fe8660e70c9175e87849763f87fbb0dd"
Dec 05 11:23:57 crc kubenswrapper[4728]: I1205 11:23:57.266442 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-f6bjc"
Dec 05 11:23:57 crc kubenswrapper[4728]: I1205 11:23:57.301553 4728 scope.go:117] "RemoveContainer" containerID="cfb91ae6e2ab001db5c34c90c9b66451fe8660e70c9175e87849763f87fbb0dd"
Dec 05 11:23:57 crc kubenswrapper[4728]: E1205 11:23:57.303297 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cfb91ae6e2ab001db5c34c90c9b66451fe8660e70c9175e87849763f87fbb0dd\": container with ID starting with cfb91ae6e2ab001db5c34c90c9b66451fe8660e70c9175e87849763f87fbb0dd not found: ID does not exist" containerID="cfb91ae6e2ab001db5c34c90c9b66451fe8660e70c9175e87849763f87fbb0dd"
Dec 05 11:23:57 crc kubenswrapper[4728]: I1205 11:23:57.303341 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cfb91ae6e2ab001db5c34c90c9b66451fe8660e70c9175e87849763f87fbb0dd"} err="failed to get container status \"cfb91ae6e2ab001db5c34c90c9b66451fe8660e70c9175e87849763f87fbb0dd\": rpc error: code = NotFound desc = could not find container \"cfb91ae6e2ab001db5c34c90c9b66451fe8660e70c9175e87849763f87fbb0dd\": container with ID starting with cfb91ae6e2ab001db5c34c90c9b66451fe8660e70c9175e87849763f87fbb0dd not found: ID does not exist"
Dec 05 11:23:57 crc kubenswrapper[4728]: I1205 11:23:57.304352 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-f6bjc"]
Dec 05 11:23:57 crc kubenswrapper[4728]: I1205 11:23:57.308644 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-f6bjc"]
Dec 05 11:23:58 crc kubenswrapper[4728]: I1205 11:23:58.276995 4728 generic.go:334] "Generic (PLEG): container finished" podID="dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f" containerID="632f124ce8aaf8e34e629f2c892f3007fb0aba6209d2765618a1440d994b4f60" exitCode=0
Dec 05 11:23:58 crc kubenswrapper[4728]: I1205 11:23:58.277098 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83hnsxg" event={"ID":"dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f","Type":"ContainerDied","Data":"632f124ce8aaf8e34e629f2c892f3007fb0aba6209d2765618a1440d994b4f60"}
Dec 05 11:23:58 crc kubenswrapper[4728]: I1205 11:23:58.365595 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda4dac4-0200-4740-a9c1-c3897809c2c0" path="/var/lib/kubelet/pods/fda4dac4-0200-4740-a9c1-c3897809c2c0/volumes"
Dec 05 11:23:59 crc kubenswrapper[4728]: I1205 11:23:59.600977 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83hnsxg"
Dec 05 11:23:59 crc kubenswrapper[4728]: I1205 11:23:59.704760 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f-bundle\") pod \"dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f\" (UID: \"dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f\") "
Dec 05 11:23:59 crc kubenswrapper[4728]: I1205 11:23:59.704868 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j9f8r\" (UniqueName: \"kubernetes.io/projected/dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f-kube-api-access-j9f8r\") pod \"dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f\" (UID: \"dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f\") "
Dec 05 11:23:59 crc kubenswrapper[4728]: I1205 11:23:59.705208 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f-util\") pod \"dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f\" (UID: \"dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f\") "
Dec 05 11:23:59 crc kubenswrapper[4728]: I1205 11:23:59.707212 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f-bundle" (OuterVolumeSpecName: "bundle") pod "dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f" (UID: "dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 11:23:59 crc kubenswrapper[4728]: I1205 11:23:59.712036 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f-kube-api-access-j9f8r" (OuterVolumeSpecName: "kube-api-access-j9f8r") pod "dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f" (UID: "dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f"). InnerVolumeSpecName "kube-api-access-j9f8r". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:23:59 crc kubenswrapper[4728]: I1205 11:23:59.739624 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f-util" (OuterVolumeSpecName: "util") pod "dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f" (UID: "dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 11:23:59 crc kubenswrapper[4728]: I1205 11:23:59.806933 4728 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f-util\") on node \"crc\" DevicePath \"\""
Dec 05 11:23:59 crc kubenswrapper[4728]: I1205 11:23:59.806985 4728 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 11:23:59 crc kubenswrapper[4728]: I1205 11:23:59.807007 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j9f8r\" (UniqueName: \"kubernetes.io/projected/dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f-kube-api-access-j9f8r\") on node \"crc\" DevicePath \"\""
Dec 05 11:24:00 crc kubenswrapper[4728]: I1205 11:24:00.290765 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83hnsxg" event={"ID":"dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f","Type":"ContainerDied","Data":"3a4793507e26544f4bc4d9919684890b5667151aa0a269111f4e1aca70504a58"}
Dec 05 11:24:00 crc kubenswrapper[4728]: I1205 11:24:00.291249 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3a4793507e26544f4bc4d9919684890b5667151aa0a269111f4e1aca70504a58"
Dec 05 11:24:00 crc kubenswrapper[4728]: I1205 11:24:00.291388 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83hnsxg"
Dec 05 11:24:08 crc kubenswrapper[4728]: I1205 11:24:08.601628 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-dc46c65cb-kfn26"]
Dec 05 11:24:08 crc kubenswrapper[4728]: E1205 11:24:08.602329 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f" containerName="extract"
Dec 05 11:24:08 crc kubenswrapper[4728]: I1205 11:24:08.602342 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f" containerName="extract"
Dec 05 11:24:08 crc kubenswrapper[4728]: E1205 11:24:08.602357 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fda4dac4-0200-4740-a9c1-c3897809c2c0" containerName="console"
Dec 05 11:24:08 crc kubenswrapper[4728]: I1205 11:24:08.602364 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="fda4dac4-0200-4740-a9c1-c3897809c2c0" containerName="console"
Dec 05 11:24:08 crc kubenswrapper[4728]: E1205 11:24:08.602380 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f" containerName="util"
Dec 05 11:24:08 crc kubenswrapper[4728]: I1205 11:24:08.602388 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f" containerName="util"
Dec 05 11:24:08 crc kubenswrapper[4728]: E1205 11:24:08.602405 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f" containerName="pull"
Dec 05 11:24:08 crc kubenswrapper[4728]: I1205 11:24:08.602412 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f" containerName="pull"
Dec 05 11:24:08 crc kubenswrapper[4728]: I1205 11:24:08.602503 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="fda4dac4-0200-4740-a9c1-c3897809c2c0" containerName="console"
Dec 05 11:24:08 crc kubenswrapper[4728]: I1205 11:24:08.602519 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f" containerName="extract"
Dec 05 11:24:08 crc kubenswrapper[4728]: I1205 11:24:08.602931 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-dc46c65cb-kfn26"
Dec 05 11:24:08 crc kubenswrapper[4728]: I1205 11:24:08.605275 4728 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-l4lc9"
Dec 05 11:24:08 crc kubenswrapper[4728]: I1205 11:24:08.605328 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt"
Dec 05 11:24:08 crc kubenswrapper[4728]: I1205 11:24:08.605847 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt"
Dec 05 11:24:08 crc kubenswrapper[4728]: I1205 11:24:08.606257 4728 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert"
Dec 05 11:24:08 crc kubenswrapper[4728]: I1205 11:24:08.607583 4728 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert"
Dec 05 11:24:08 crc kubenswrapper[4728]: I1205 11:24:08.624150 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-dc46c65cb-kfn26"]
Dec 05 11:24:08 crc kubenswrapper[4728]: I1205 11:24:08.718389 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x2jwx\" (UniqueName: \"kubernetes.io/projected/1f9a485b-4186-4184-9f2a-81a4b74105d9-kube-api-access-x2jwx\") pod \"metallb-operator-controller-manager-dc46c65cb-kfn26\" (UID: \"1f9a485b-4186-4184-9f2a-81a4b74105d9\") " pod="metallb-system/metallb-operator-controller-manager-dc46c65cb-kfn26"
Dec 05 11:24:08 crc kubenswrapper[4728]: I1205 11:24:08.718608 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1f9a485b-4186-4184-9f2a-81a4b74105d9-webhook-cert\") pod \"metallb-operator-controller-manager-dc46c65cb-kfn26\" (UID: \"1f9a485b-4186-4184-9f2a-81a4b74105d9\") " pod="metallb-system/metallb-operator-controller-manager-dc46c65cb-kfn26"
Dec 05 11:24:08 crc kubenswrapper[4728]: I1205 11:24:08.718636 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1f9a485b-4186-4184-9f2a-81a4b74105d9-apiservice-cert\") pod \"metallb-operator-controller-manager-dc46c65cb-kfn26\" (UID: \"1f9a485b-4186-4184-9f2a-81a4b74105d9\") " pod="metallb-system/metallb-operator-controller-manager-dc46c65cb-kfn26"
Dec 05 11:24:08 crc kubenswrapper[4728]: I1205 11:24:08.820411 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1f9a485b-4186-4184-9f2a-81a4b74105d9-webhook-cert\") pod \"metallb-operator-controller-manager-dc46c65cb-kfn26\" (UID: \"1f9a485b-4186-4184-9f2a-81a4b74105d9\") " pod="metallb-system/metallb-operator-controller-manager-dc46c65cb-kfn26"
Dec 05 11:24:08 crc kubenswrapper[4728]: I1205 11:24:08.820480 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1f9a485b-4186-4184-9f2a-81a4b74105d9-apiservice-cert\") pod \"metallb-operator-controller-manager-dc46c65cb-kfn26\" (UID: \"1f9a485b-4186-4184-9f2a-81a4b74105d9\") " pod="metallb-system/metallb-operator-controller-manager-dc46c65cb-kfn26"
Dec 05 11:24:08 crc kubenswrapper[4728]: I1205 11:24:08.820523 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x2jwx\" (UniqueName: \"kubernetes.io/projected/1f9a485b-4186-4184-9f2a-81a4b74105d9-kube-api-access-x2jwx\") pod \"metallb-operator-controller-manager-dc46c65cb-kfn26\" (UID: \"1f9a485b-4186-4184-9f2a-81a4b74105d9\") " pod="metallb-system/metallb-operator-controller-manager-dc46c65cb-kfn26"
Dec 05 11:24:08 crc kubenswrapper[4728]: I1205 11:24:08.827326 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1f9a485b-4186-4184-9f2a-81a4b74105d9-webhook-cert\") pod \"metallb-operator-controller-manager-dc46c65cb-kfn26\" (UID: \"1f9a485b-4186-4184-9f2a-81a4b74105d9\") " pod="metallb-system/metallb-operator-controller-manager-dc46c65cb-kfn26"
Dec 05 11:24:08 crc kubenswrapper[4728]: I1205 11:24:08.827359 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1f9a485b-4186-4184-9f2a-81a4b74105d9-apiservice-cert\") pod \"metallb-operator-controller-manager-dc46c65cb-kfn26\" (UID: \"1f9a485b-4186-4184-9f2a-81a4b74105d9\") " pod="metallb-system/metallb-operator-controller-manager-dc46c65cb-kfn26"
Dec 05 11:24:08 crc kubenswrapper[4728]: I1205 11:24:08.839959 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x2jwx\" (UniqueName: \"kubernetes.io/projected/1f9a485b-4186-4184-9f2a-81a4b74105d9-kube-api-access-x2jwx\") pod \"metallb-operator-controller-manager-dc46c65cb-kfn26\" (UID: \"1f9a485b-4186-4184-9f2a-81a4b74105d9\") " pod="metallb-system/metallb-operator-controller-manager-dc46c65cb-kfn26"
Dec 05 11:24:08 crc kubenswrapper[4728]: I1205 11:24:08.923152 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-dc46c65cb-kfn26"
Dec 05 11:24:09 crc kubenswrapper[4728]: I1205 11:24:09.009268 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-585ddd7f97-8nw7b"]
Dec 05 11:24:09 crc kubenswrapper[4728]: I1205 11:24:09.010408 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-585ddd7f97-8nw7b"
Dec 05 11:24:09 crc kubenswrapper[4728]: I1205 11:24:09.014467 4728 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert"
Dec 05 11:24:09 crc kubenswrapper[4728]: I1205 11:24:09.014669 4728 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-6hzjw"
Dec 05 11:24:09 crc kubenswrapper[4728]: W1205 11:24:09.014818 4728 reflector.go:561] object-"metallb-system"/"metallb-webhook-cert": failed to list *v1.Secret: secrets "metallb-webhook-cert" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "metallb-system": no relationship found between node 'crc' and this object
Dec 05 11:24:09 crc kubenswrapper[4728]: E1205 11:24:09.014862 4728 reflector.go:158] "Unhandled Error" err="object-\"metallb-system\"/\"metallb-webhook-cert\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"metallb-webhook-cert\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"metallb-system\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Dec 05 11:24:09 crc kubenswrapper[4728]: I1205 11:24:09.035292 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-585ddd7f97-8nw7b"]
Dec 05 11:24:09 crc kubenswrapper[4728]: I1205 11:24:09.125959 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e085b90a-7d0a-4027-bf16-477076627681-apiservice-cert\") pod \"metallb-operator-webhook-server-585ddd7f97-8nw7b\" (UID: \"e085b90a-7d0a-4027-bf16-477076627681\") " pod="metallb-system/metallb-operator-webhook-server-585ddd7f97-8nw7b"
Dec 05 11:24:09 crc kubenswrapper[4728]: I1205 11:24:09.126133 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e085b90a-7d0a-4027-bf16-477076627681-webhook-cert\") pod \"metallb-operator-webhook-server-585ddd7f97-8nw7b\" (UID: \"e085b90a-7d0a-4027-bf16-477076627681\") " pod="metallb-system/metallb-operator-webhook-server-585ddd7f97-8nw7b"
Dec 05 11:24:09 crc kubenswrapper[4728]: I1205 11:24:09.126190 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qdbnt\" (UniqueName: \"kubernetes.io/projected/e085b90a-7d0a-4027-bf16-477076627681-kube-api-access-qdbnt\") pod \"metallb-operator-webhook-server-585ddd7f97-8nw7b\" (UID: \"e085b90a-7d0a-4027-bf16-477076627681\") " pod="metallb-system/metallb-operator-webhook-server-585ddd7f97-8nw7b"
Dec 05 11:24:09 crc kubenswrapper[4728]: I1205 11:24:09.201334 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-dc46c65cb-kfn26"]
Dec 05 11:24:09 crc kubenswrapper[4728]: I1205 11:24:09.228975 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e085b90a-7d0a-4027-bf16-477076627681-apiservice-cert\") pod \"metallb-operator-webhook-server-585ddd7f97-8nw7b\" (UID: \"e085b90a-7d0a-4027-bf16-477076627681\") " pod="metallb-system/metallb-operator-webhook-server-585ddd7f97-8nw7b"
Dec 05 11:24:09 crc kubenswrapper[4728]: I1205 11:24:09.229047 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e085b90a-7d0a-4027-bf16-477076627681-webhook-cert\") pod \"metallb-operator-webhook-server-585ddd7f97-8nw7b\" (UID: \"e085b90a-7d0a-4027-bf16-477076627681\") " pod="metallb-system/metallb-operator-webhook-server-585ddd7f97-8nw7b"
Dec 05 11:24:09 crc kubenswrapper[4728]: I1205 11:24:09.229071 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qdbnt\" (UniqueName: \"kubernetes.io/projected/e085b90a-7d0a-4027-bf16-477076627681-kube-api-access-qdbnt\") pod \"metallb-operator-webhook-server-585ddd7f97-8nw7b\" (UID: \"e085b90a-7d0a-4027-bf16-477076627681\") " pod="metallb-system/metallb-operator-webhook-server-585ddd7f97-8nw7b"
Dec 05 11:24:09 crc kubenswrapper[4728]: I1205 11:24:09.237732 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e085b90a-7d0a-4027-bf16-477076627681-apiservice-cert\") pod \"metallb-operator-webhook-server-585ddd7f97-8nw7b\" (UID: \"e085b90a-7d0a-4027-bf16-477076627681\") " pod="metallb-system/metallb-operator-webhook-server-585ddd7f97-8nw7b"
Dec 05 11:24:09 crc kubenswrapper[4728]: I1205 11:24:09.237816 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e085b90a-7d0a-4027-bf16-477076627681-webhook-cert\") pod \"metallb-operator-webhook-server-585ddd7f97-8nw7b\" (UID: \"e085b90a-7d0a-4027-bf16-477076627681\") " pod="metallb-system/metallb-operator-webhook-server-585ddd7f97-8nw7b"
Dec 05 11:24:09 crc kubenswrapper[4728]: I1205 11:24:09.250712 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qdbnt\" (UniqueName: \"kubernetes.io/projected/e085b90a-7d0a-4027-bf16-477076627681-kube-api-access-qdbnt\") pod \"metallb-operator-webhook-server-585ddd7f97-8nw7b\" (UID: \"e085b90a-7d0a-4027-bf16-477076627681\") " pod="metallb-system/metallb-operator-webhook-server-585ddd7f97-8nw7b"
Dec 05 11:24:09 crc kubenswrapper[4728]: I1205 11:24:09.342503 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-585ddd7f97-8nw7b"
Dec 05 11:24:09 crc kubenswrapper[4728]: I1205 11:24:09.344367 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-dc46c65cb-kfn26" event={"ID":"1f9a485b-4186-4184-9f2a-81a4b74105d9","Type":"ContainerStarted","Data":"79e0bacd369f16b19a9df79690246c454de028f3f646d6b0f386fbeac32fedc8"}
Dec 05 11:24:09 crc kubenswrapper[4728]: I1205 11:24:09.566753 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-585ddd7f97-8nw7b"]
Dec 05 11:24:09 crc kubenswrapper[4728]: W1205 11:24:09.579137 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode085b90a_7d0a_4027_bf16_477076627681.slice/crio-73a9bee592965c2a5d0d1229ec3cb75cacaa52bb537c92377417803b40623a6e WatchSource:0}: Error finding container 73a9bee592965c2a5d0d1229ec3cb75cacaa52bb537c92377417803b40623a6e: Status 404 returned error can't find the container with id 73a9bee592965c2a5d0d1229ec3cb75cacaa52bb537c92377417803b40623a6e
Dec 05 11:24:10 crc kubenswrapper[4728]: I1205 11:24:10.360230 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-585ddd7f97-8nw7b" event={"ID":"e085b90a-7d0a-4027-bf16-477076627681","Type":"ContainerStarted","Data":"73a9bee592965c2a5d0d1229ec3cb75cacaa52bb537c92377417803b40623a6e"}
Dec 05 11:24:10 crc kubenswrapper[4728]: I1205 11:24:10.464861 4728 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert"
Dec 05 11:24:12 crc kubenswrapper[4728]: I1205 11:24:12.368490 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-dc46c65cb-kfn26" event={"ID":"1f9a485b-4186-4184-9f2a-81a4b74105d9","Type":"ContainerStarted","Data":"cf53baa8ad0f08441917153a996110374d823654720bf5d5262b7a8a0cbe2c14"}
Dec 05 11:24:12 crc kubenswrapper[4728]: I1205 11:24:12.369001 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-dc46c65cb-kfn26"
Dec 05 11:24:12 crc kubenswrapper[4728]: I1205 11:24:12.392606 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-dc46c65cb-kfn26" podStartSLOduration=1.451782477 podStartE2EDuration="4.392586673s" podCreationTimestamp="2025-12-05 11:24:08 +0000 UTC" firstStartedPulling="2025-12-05 11:24:09.229811587 +0000 UTC m=+983.371934280" lastFinishedPulling="2025-12-05 11:24:12.170615783 +0000 UTC m=+986.312738476" observedRunningTime="2025-12-05 11:24:12.387149117 +0000 UTC m=+986.529271820" watchObservedRunningTime="2025-12-05 11:24:12.392586673 +0000 UTC m=+986.534709376"
Dec 05 11:24:15 crc kubenswrapper[4728]: I1205 11:24:15.383548 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-585ddd7f97-8nw7b" event={"ID":"e085b90a-7d0a-4027-bf16-477076627681","Type":"ContainerStarted","Data":"a27021eabdc4058c04c7ce737485c6aa440b92b0a920f28ce82f3f7228d90cc2"}
Dec 05 11:24:15 crc kubenswrapper[4728]: I1205 11:24:15.383959 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-585ddd7f97-8nw7b"
Dec 05 11:24:15 crc kubenswrapper[4728]: I1205 11:24:15.404615 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-585ddd7f97-8nw7b" podStartSLOduration=1.828856353 podStartE2EDuration="7.404594975s" podCreationTimestamp="2025-12-05 11:24:08 +0000 UTC" firstStartedPulling="2025-12-05 11:24:09.582223568 +0000 UTC m=+983.724346281" lastFinishedPulling="2025-12-05 11:24:15.15796219 +0000 UTC m=+989.300084903" observedRunningTime="2025-12-05 11:24:15.402445997 +0000 UTC m=+989.544568700" watchObservedRunningTime="2025-12-05 11:24:15.404594975 +0000 UTC m=+989.546717668"
Dec 05 11:24:25 crc kubenswrapper[4728]: I1205 11:24:25.702425 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 11:24:25 crc kubenswrapper[4728]: I1205 11:24:25.703086 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 11:24:29 crc kubenswrapper[4728]: I1205 11:24:29.347291 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-585ddd7f97-8nw7b"
Dec 05 11:24:48 crc kubenswrapper[4728]: I1205 11:24:48.933866 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-dc46c65cb-kfn26"
Dec 05 11:24:49 crc kubenswrapper[4728]: I1205 11:24:49.809709 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-fpsrk"]
Dec 05 11:24:49 crc kubenswrapper[4728]: I1205 11:24:49.812658 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-fpsrk"
Dec 05 11:24:49 crc kubenswrapper[4728]: I1205 11:24:49.817342 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup"
Dec 05 11:24:49 crc kubenswrapper[4728]: I1205 11:24:49.817695 4728 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-mhv9t"
Dec 05 11:24:49 crc kubenswrapper[4728]: I1205 11:24:49.818444 4728 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret"
Dec 05 11:24:49 crc kubenswrapper[4728]: I1205 11:24:49.823363 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-9m6g5"]
Dec 05 11:24:49 crc kubenswrapper[4728]: I1205 11:24:49.824322 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-9m6g5"
Dec 05 11:24:49 crc kubenswrapper[4728]: I1205 11:24:49.826973 4728 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert"
Dec 05 11:24:49 crc kubenswrapper[4728]: I1205 11:24:49.852096 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-9m6g5"]
Dec 05 11:24:49 crc kubenswrapper[4728]: I1205 11:24:49.918360 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/cc6442af-701e-429c-9fe5-93dbe8884f45-metrics\") pod \"frr-k8s-fpsrk\" (UID: \"cc6442af-701e-429c-9fe5-93dbe8884f45\") " pod="metallb-system/frr-k8s-fpsrk"
Dec 05 11:24:49 crc kubenswrapper[4728]: I1205 11:24:49.918421 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cc6442af-701e-429c-9fe5-93dbe8884f45-metrics-certs\") pod \"frr-k8s-fpsrk\" (UID: \"cc6442af-701e-429c-9fe5-93dbe8884f45\") " pod="metallb-system/frr-k8s-fpsrk"
Dec 05 11:24:49 crc kubenswrapper[4728]: I1205 11:24:49.918528 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b85fcb5b-5696-42c4-bb19-1e0d5fa8ff06-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-9m6g5\" (UID: \"b85fcb5b-5696-42c4-bb19-1e0d5fa8ff06\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-9m6g5"
Dec 05 11:24:49 crc kubenswrapper[4728]: I1205 11:24:49.918582 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4cgtb\" (UniqueName: \"kubernetes.io/projected/b85fcb5b-5696-42c4-bb19-1e0d5fa8ff06-kube-api-access-4cgtb\") pod \"frr-k8s-webhook-server-7fcb986d4-9m6g5\" (UID: \"b85fcb5b-5696-42c4-bb19-1e0d5fa8ff06\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-9m6g5"
Dec 05 11:24:49 crc kubenswrapper[4728]: I1205 11:24:49.918611 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/cc6442af-701e-429c-9fe5-93dbe8884f45-frr-startup\") pod \"frr-k8s-fpsrk\" (UID: \"cc6442af-701e-429c-9fe5-93dbe8884f45\") " pod="metallb-system/frr-k8s-fpsrk"
Dec 05 11:24:49 crc kubenswrapper[4728]: I1205 11:24:49.918657 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/cc6442af-701e-429c-9fe5-93dbe8884f45-frr-conf\") pod \"frr-k8s-fpsrk\" (UID: \"cc6442af-701e-429c-9fe5-93dbe8884f45\") " pod="metallb-system/frr-k8s-fpsrk"
Dec 05 11:24:49 crc kubenswrapper[4728]: I1205 11:24:49.918696 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wzttn\" (UniqueName: \"kubernetes.io/projected/cc6442af-701e-429c-9fe5-93dbe8884f45-kube-api-access-wzttn\") pod \"frr-k8s-fpsrk\" (UID: \"cc6442af-701e-429c-9fe5-93dbe8884f45\") " pod="metallb-system/frr-k8s-fpsrk"
Dec 05 11:24:49 crc kubenswrapper[4728]: I1205 11:24:49.918849 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/cc6442af-701e-429c-9fe5-93dbe8884f45-frr-sockets\") pod \"frr-k8s-fpsrk\" (UID: \"cc6442af-701e-429c-9fe5-93dbe8884f45\") " pod="metallb-system/frr-k8s-fpsrk"
Dec 05 11:24:49 crc kubenswrapper[4728]: I1205 11:24:49.918884 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/cc6442af-701e-429c-9fe5-93dbe8884f45-reloader\") pod \"frr-k8s-fpsrk\" (UID: \"cc6442af-701e-429c-9fe5-93dbe8884f45\") " pod="metallb-system/frr-k8s-fpsrk"
Dec 05 11:24:49 crc kubenswrapper[4728]: I1205 11:24:49.932172 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-7wkzn"]
Dec 05 11:24:49 crc kubenswrapper[4728]: I1205 11:24:49.933339 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-7wkzn"
Dec 05 11:24:49 crc kubenswrapper[4728]: I1205 11:24:49.935350 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2"
Dec 05 11:24:49 crc kubenswrapper[4728]: I1205 11:24:49.935648 4728 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist"
Dec 05 11:24:49 crc kubenswrapper[4728]: I1205 11:24:49.935919 4728 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret"
Dec 05 11:24:49 crc kubenswrapper[4728]: I1205 11:24:49.936096 4728 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-n9v5v"
Dec 05 11:24:49 crc kubenswrapper[4728]: I1205 11:24:49.941518 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-f8648f98b-2frdd"]
Dec 05 11:24:49 crc kubenswrapper[4728]: I1205 11:24:49.942741 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-f8648f98b-2frdd"
Dec 05 11:24:49 crc kubenswrapper[4728]: I1205 11:24:49.950095 4728 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret"
Dec 05 11:24:49 crc kubenswrapper[4728]: I1205 11:24:49.958496 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-2frdd"]
Dec 05 11:24:50 crc kubenswrapper[4728]: I1205 11:24:50.020114 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n8ml4\" (UniqueName: \"kubernetes.io/projected/ee5be811-5e9e-4a19-955b-944a9a457060-kube-api-access-n8ml4\") pod \"speaker-7wkzn\" (UID: \"ee5be811-5e9e-4a19-955b-944a9a457060\") " pod="metallb-system/speaker-7wkzn"
Dec 05 11:24:50 crc kubenswrapper[4728]: I1205 11:24:50.020183 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/cc6442af-701e-429c-9fe5-93dbe8884f45-reloader\") pod \"frr-k8s-fpsrk\" (UID: \"cc6442af-701e-429c-9fe5-93dbe8884f45\") " pod="metallb-system/frr-k8s-fpsrk"
Dec 05 11:24:50 crc kubenswrapper[4728]: I1205 11:24:50.020219 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/cc6442af-701e-429c-9fe5-93dbe8884f45-metrics\") pod \"frr-k8s-fpsrk\" (UID: \"cc6442af-701e-429c-9fe5-93dbe8884f45\") " pod="metallb-system/frr-k8s-fpsrk"
Dec 05 11:24:50 crc kubenswrapper[4728]: I1205 11:24:50.020293 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cc6442af-701e-429c-9fe5-93dbe8884f45-metrics-certs\") pod \"frr-k8s-fpsrk\" (UID: \"cc6442af-701e-429c-9fe5-93dbe8884f45\") " pod="metallb-system/frr-k8s-fpsrk"
Dec 05 11:24:50 crc kubenswrapper[4728]: I1205 11:24:50.020343 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b85fcb5b-5696-42c4-bb19-1e0d5fa8ff06-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-9m6g5\" (UID: \"b85fcb5b-5696-42c4-bb19-1e0d5fa8ff06\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-9m6g5"
Dec 05 11:24:50 crc kubenswrapper[4728]: I1205 11:24:50.020365 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4cgtb\" (UniqueName: \"kubernetes.io/projected/b85fcb5b-5696-42c4-bb19-1e0d5fa8ff06-kube-api-access-4cgtb\") pod \"frr-k8s-webhook-server-7fcb986d4-9m6g5\" (UID: \"b85fcb5b-5696-42c4-bb19-1e0d5fa8ff06\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-9m6g5"
Dec 05 11:24:50 crc kubenswrapper[4728]: E1205 11:24:50.020592 4728 secret.go:188] Couldn't get secret metallb-system/frr-k8s-webhook-server-cert: secret "frr-k8s-webhook-server-cert" not found
Dec 05 11:24:50 crc kubenswrapper[4728]: E1205 11:24:50.020690 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b85fcb5b-5696-42c4-bb19-1e0d5fa8ff06-cert podName:b85fcb5b-5696-42c4-bb19-1e0d5fa8ff06 nodeName:}" failed. No retries permitted until 2025-12-05 11:24:50.520671711 +0000 UTC m=+1024.662794404 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/b85fcb5b-5696-42c4-bb19-1e0d5fa8ff06-cert") pod "frr-k8s-webhook-server-7fcb986d4-9m6g5" (UID: "b85fcb5b-5696-42c4-bb19-1e0d5fa8ff06") : secret "frr-k8s-webhook-server-cert" not found
Dec 05 11:24:50 crc kubenswrapper[4728]: I1205 11:24:50.021269 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/cc6442af-701e-429c-9fe5-93dbe8884f45-frr-startup\") pod \"frr-k8s-fpsrk\" (UID: \"cc6442af-701e-429c-9fe5-93dbe8884f45\") " pod="metallb-system/frr-k8s-fpsrk"
Dec 05 11:24:50 crc kubenswrapper[4728]: I1205 11:24:50.021308 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/cc6442af-701e-429c-9fe5-93dbe8884f45-frr-conf\") pod \"frr-k8s-fpsrk\" (UID: \"cc6442af-701e-429c-9fe5-93dbe8884f45\") " pod="metallb-system/frr-k8s-fpsrk"
Dec 05 11:24:50 crc kubenswrapper[4728]: I1205 11:24:50.021344 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wzttn\" (UniqueName: \"kubernetes.io/projected/cc6442af-701e-429c-9fe5-93dbe8884f45-kube-api-access-wzttn\") pod \"frr-k8s-fpsrk\" (UID: \"cc6442af-701e-429c-9fe5-93dbe8884f45\") " pod="metallb-system/frr-k8s-fpsrk"
Dec 05 11:24:50 crc kubenswrapper[4728]: I1205 11:24:50.021356 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/cc6442af-701e-429c-9fe5-93dbe8884f45-metrics\") pod \"frr-k8s-fpsrk\" (UID: \"cc6442af-701e-429c-9fe5-93dbe8884f45\") " pod="metallb-system/frr-k8s-fpsrk"
Dec 05 11:24:50 crc kubenswrapper[4728]: I1205 11:24:50.021379 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/ee5be811-5e9e-4a19-955b-944a9a457060-memberlist\") pod \"speaker-7wkzn\" (UID: \"ee5be811-5e9e-4a19-955b-944a9a457060\") " pod="metallb-system/speaker-7wkzn"
Dec 05 11:24:50 crc kubenswrapper[4728]: I1205 11:24:50.021465 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/09963a34-a1db-4854-8a6a-475da8222a7b-metrics-certs\") pod \"controller-f8648f98b-2frdd\" (UID: \"09963a34-a1db-4854-8a6a-475da8222a7b\") " pod="metallb-system/controller-f8648f98b-2frdd"
Dec 05 11:24:50 crc kubenswrapper[4728]: I1205 11:24:50.021564 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/cc6442af-701e-429c-9fe5-93dbe8884f45-reloader\") pod \"frr-k8s-fpsrk\" (UID: \"cc6442af-701e-429c-9fe5-93dbe8884f45\") " pod="metallb-system/frr-k8s-fpsrk"
Dec 05 11:24:50 crc kubenswrapper[4728]: I1205 11:24:50.021576 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/ee5be811-5e9e-4a19-955b-944a9a457060-metallb-excludel2\") pod \"speaker-7wkzn\" (UID: \"ee5be811-5e9e-4a19-955b-944a9a457060\") " pod="metallb-system/speaker-7wkzn"
Dec 05 11:24:50 crc kubenswrapper[4728]: I1205 11:24:50.021649 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/cc6442af-701e-429c-9fe5-93dbe8884f45-frr-conf\") pod \"frr-k8s-fpsrk\" (UID: \"cc6442af-701e-429c-9fe5-93dbe8884f45\") " pod="metallb-system/frr-k8s-fpsrk"
Dec 05 11:24:50 crc kubenswrapper[4728]: I1205 11:24:50.021729 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ee5be811-5e9e-4a19-955b-944a9a457060-metrics-certs\") pod \"speaker-7wkzn\" (UID: \"ee5be811-5e9e-4a19-955b-944a9a457060\") " pod="metallb-system/speaker-7wkzn"
Dec 05 11:24:50 crc kubenswrapper[4728]: I1205 11:24:50.021754 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d88hf\" (UniqueName: \"kubernetes.io/projected/09963a34-a1db-4854-8a6a-475da8222a7b-kube-api-access-d88hf\") pod \"controller-f8648f98b-2frdd\" (UID: \"09963a34-a1db-4854-8a6a-475da8222a7b\") " pod="metallb-system/controller-f8648f98b-2frdd"
Dec 05 11:24:50 crc kubenswrapper[4728]: I1205 11:24:50.021832 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/cc6442af-701e-429c-9fe5-93dbe8884f45-frr-sockets\") pod \"frr-k8s-fpsrk\" (UID: \"cc6442af-701e-429c-9fe5-93dbe8884f45\") " pod="metallb-system/frr-k8s-fpsrk"
Dec 05 11:24:50 crc kubenswrapper[4728]: I1205 11:24:50.021854 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/09963a34-a1db-4854-8a6a-475da8222a7b-cert\") pod \"controller-f8648f98b-2frdd\" (UID: \"09963a34-a1db-4854-8a6a-475da8222a7b\") " pod="metallb-system/controller-f8648f98b-2frdd"
Dec 05 11:24:50 crc kubenswrapper[4728]: I1205 11:24:50.022174 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/cc6442af-701e-429c-9fe5-93dbe8884f45-frr-sockets\") pod \"frr-k8s-fpsrk\" (UID: \"cc6442af-701e-429c-9fe5-93dbe8884f45\") " pod="metallb-system/frr-k8s-fpsrk"
Dec 05 11:24:50 crc kubenswrapper[4728]: I1205 11:24:50.022964 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/cc6442af-701e-429c-9fe5-93dbe8884f45-frr-startup\") pod \"frr-k8s-fpsrk\" (UID: \"cc6442af-701e-429c-9fe5-93dbe8884f45\") " pod="metallb-system/frr-k8s-fpsrk"
Dec 05 11:24:50 crc kubenswrapper[4728]: I1205 11:24:50.029317 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cc6442af-701e-429c-9fe5-93dbe8884f45-metrics-certs\") pod \"frr-k8s-fpsrk\" (UID: \"cc6442af-701e-429c-9fe5-93dbe8884f45\") " pod="metallb-system/frr-k8s-fpsrk"
Dec 05 11:24:50 crc kubenswrapper[4728]: I1205 11:24:50.038698 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4cgtb\" (UniqueName: \"kubernetes.io/projected/b85fcb5b-5696-42c4-bb19-1e0d5fa8ff06-kube-api-access-4cgtb\") pod \"frr-k8s-webhook-server-7fcb986d4-9m6g5\" (UID: \"b85fcb5b-5696-42c4-bb19-1e0d5fa8ff06\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-9m6g5"
Dec 05 11:24:50 crc kubenswrapper[4728]: I1205 11:24:50.040377 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wzttn\" (UniqueName: \"kubernetes.io/projected/cc6442af-701e-429c-9fe5-93dbe8884f45-kube-api-access-wzttn\") pod \"frr-k8s-fpsrk\" (UID: \"cc6442af-701e-429c-9fe5-93dbe8884f45\") " pod="metallb-system/frr-k8s-fpsrk"
Dec 05 11:24:50 crc kubenswrapper[4728]: I1205 11:24:50.122697 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/ee5be811-5e9e-4a19-955b-944a9a457060-metallb-excludel2\") pod \"speaker-7wkzn\" (UID: \"ee5be811-5e9e-4a19-955b-944a9a457060\") " pod="metallb-system/speaker-7wkzn"
Dec 05 11:24:50 crc kubenswrapper[4728]: I1205 11:24:50.122780 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ee5be811-5e9e-4a19-955b-944a9a457060-metrics-certs\") pod \"speaker-7wkzn\" (UID: \"ee5be811-5e9e-4a19-955b-944a9a457060\") " pod="metallb-system/speaker-7wkzn"
Dec 05 11:24:50 crc kubenswrapper[4728]: I1205 11:24:50.122828 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d88hf\" (UniqueName: \"kubernetes.io/projected/09963a34-a1db-4854-8a6a-475da8222a7b-kube-api-access-d88hf\") pod \"controller-f8648f98b-2frdd\" (UID: \"09963a34-a1db-4854-8a6a-475da8222a7b\") " pod="metallb-system/controller-f8648f98b-2frdd"
Dec 05 11:24:50 crc kubenswrapper[4728]: I1205 11:24:50.122860 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/09963a34-a1db-4854-8a6a-475da8222a7b-cert\") pod \"controller-f8648f98b-2frdd\" (UID: \"09963a34-a1db-4854-8a6a-475da8222a7b\") " pod="metallb-system/controller-f8648f98b-2frdd"
Dec 05 11:24:50 crc kubenswrapper[4728]: I1205 11:24:50.122885 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n8ml4\" (UniqueName: \"kubernetes.io/projected/ee5be811-5e9e-4a19-955b-944a9a457060-kube-api-access-n8ml4\") pod \"speaker-7wkzn\" (UID: \"ee5be811-5e9e-4a19-955b-944a9a457060\") " pod="metallb-system/speaker-7wkzn"
Dec 05 11:24:50 crc kubenswrapper[4728]: I1205 11:24:50.122960 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/ee5be811-5e9e-4a19-955b-944a9a457060-memberlist\") pod \"speaker-7wkzn\" (UID: \"ee5be811-5e9e-4a19-955b-944a9a457060\") " pod="metallb-system/speaker-7wkzn"
Dec 05 11:24:50 crc kubenswrapper[4728]: I1205 11:24:50.122985 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/09963a34-a1db-4854-8a6a-475da8222a7b-metrics-certs\") pod \"controller-f8648f98b-2frdd\" (UID: \"09963a34-a1db-4854-8a6a-475da8222a7b\") " pod="metallb-system/controller-f8648f98b-2frdd"
Dec 05 11:24:50 crc kubenswrapper[4728]: E1205 11:24:50.123150 4728 secret.go:188] Couldn't get secret metallb-system/controller-certs-secret: secret "controller-certs-secret" not found
Dec 05 11:24:50 crc kubenswrapper[4728]: E1205 11:24:50.123213 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/09963a34-a1db-4854-8a6a-475da8222a7b-metrics-certs podName:09963a34-a1db-4854-8a6a-475da8222a7b nodeName:}" failed. No retries permitted until 2025-12-05 11:24:50.623193589 +0000 UTC m=+1024.765316282 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/09963a34-a1db-4854-8a6a-475da8222a7b-metrics-certs") pod "controller-f8648f98b-2frdd" (UID: "09963a34-a1db-4854-8a6a-475da8222a7b") : secret "controller-certs-secret" not found
Dec 05 11:24:50 crc kubenswrapper[4728]: I1205 11:24:50.123531 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/ee5be811-5e9e-4a19-955b-944a9a457060-metallb-excludel2\") pod \"speaker-7wkzn\" (UID: \"ee5be811-5e9e-4a19-955b-944a9a457060\") " pod="metallb-system/speaker-7wkzn"
Dec 05 11:24:50 crc kubenswrapper[4728]: E1205 11:24:50.123567 4728 secret.go:188] Couldn't get secret metallb-system/speaker-certs-secret: secret "speaker-certs-secret" not found
Dec 05 11:24:50 crc kubenswrapper[4728]: E1205 11:24:50.123596 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ee5be811-5e9e-4a19-955b-944a9a457060-metrics-certs podName:ee5be811-5e9e-4a19-955b-944a9a457060 nodeName:}" failed. No retries permitted until 2025-12-05 11:24:50.62358724 +0000 UTC m=+1024.765709933 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/ee5be811-5e9e-4a19-955b-944a9a457060-metrics-certs") pod "speaker-7wkzn" (UID: "ee5be811-5e9e-4a19-955b-944a9a457060") : secret "speaker-certs-secret" not found
Dec 05 11:24:50 crc kubenswrapper[4728]: E1205 11:24:50.123644 4728 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found
Dec 05 11:24:50 crc kubenswrapper[4728]: E1205 11:24:50.123665 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ee5be811-5e9e-4a19-955b-944a9a457060-memberlist podName:ee5be811-5e9e-4a19-955b-944a9a457060 nodeName:}" failed. No retries permitted until 2025-12-05 11:24:50.623656792 +0000 UTC m=+1024.765779485 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/ee5be811-5e9e-4a19-955b-944a9a457060-memberlist") pod "speaker-7wkzn" (UID: "ee5be811-5e9e-4a19-955b-944a9a457060") : secret "metallb-memberlist" not found
Dec 05 11:24:50 crc kubenswrapper[4728]: I1205 11:24:50.126002 4728 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert"
Dec 05 11:24:50 crc kubenswrapper[4728]: I1205 11:24:50.134765 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-fpsrk"
Dec 05 11:24:50 crc kubenswrapper[4728]: I1205 11:24:50.137537 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/09963a34-a1db-4854-8a6a-475da8222a7b-cert\") pod \"controller-f8648f98b-2frdd\" (UID: \"09963a34-a1db-4854-8a6a-475da8222a7b\") " pod="metallb-system/controller-f8648f98b-2frdd"
Dec 05 11:24:50 crc kubenswrapper[4728]: I1205 11:24:50.147200 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n8ml4\" (UniqueName: \"kubernetes.io/projected/ee5be811-5e9e-4a19-955b-944a9a457060-kube-api-access-n8ml4\") pod \"speaker-7wkzn\" (UID: \"ee5be811-5e9e-4a19-955b-944a9a457060\") " pod="metallb-system/speaker-7wkzn"
Dec 05 11:24:50 crc kubenswrapper[4728]: I1205 11:24:50.156349 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d88hf\" (UniqueName: \"kubernetes.io/projected/09963a34-a1db-4854-8a6a-475da8222a7b-kube-api-access-d88hf\") pod \"controller-f8648f98b-2frdd\" (UID: \"09963a34-a1db-4854-8a6a-475da8222a7b\") " pod="metallb-system/controller-f8648f98b-2frdd"
Dec 05 11:24:50 crc kubenswrapper[4728]: I1205 11:24:50.526982 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b85fcb5b-5696-42c4-bb19-1e0d5fa8ff06-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-9m6g5\" (UID: \"b85fcb5b-5696-42c4-bb19-1e0d5fa8ff06\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-9m6g5"
Dec 05 11:24:50 crc kubenswrapper[4728]: I1205 11:24:50.531255 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b85fcb5b-5696-42c4-bb19-1e0d5fa8ff06-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-9m6g5\" (UID: \"b85fcb5b-5696-42c4-bb19-1e0d5fa8ff06\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-9m6g5"
Dec 05 11:24:50 crc kubenswrapper[4728]: I1205 11:24:50.629322 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ee5be811-5e9e-4a19-955b-944a9a457060-metrics-certs\") pod \"speaker-7wkzn\" (UID: \"ee5be811-5e9e-4a19-955b-944a9a457060\") " pod="metallb-system/speaker-7wkzn"
Dec 05 11:24:50 crc kubenswrapper[4728]: I1205 11:24:50.629932 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/ee5be811-5e9e-4a19-955b-944a9a457060-memberlist\") pod \"speaker-7wkzn\" (UID: \"ee5be811-5e9e-4a19-955b-944a9a457060\") " pod="metallb-system/speaker-7wkzn"
Dec 05 11:24:50 crc kubenswrapper[4728]: I1205 11:24:50.630069 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/09963a34-a1db-4854-8a6a-475da8222a7b-metrics-certs\") pod \"controller-f8648f98b-2frdd\" (UID: \"09963a34-a1db-4854-8a6a-475da8222a7b\") " pod="metallb-system/controller-f8648f98b-2frdd"
Dec 05 11:24:50 crc kubenswrapper[4728]: E1205 11:24:50.631990 4728 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found
Dec 05 11:24:50 crc kubenswrapper[4728]: E1205 11:24:50.632100 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ee5be811-5e9e-4a19-955b-944a9a457060-memberlist podName:ee5be811-5e9e-4a19-955b-944a9a457060 nodeName:}" failed.
No retries permitted until 2025-12-05 11:24:51.63207431 +0000 UTC m=+1025.774197003 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/ee5be811-5e9e-4a19-955b-944a9a457060-memberlist") pod "speaker-7wkzn" (UID: "ee5be811-5e9e-4a19-955b-944a9a457060") : secret "metallb-memberlist" not found Dec 05 11:24:50 crc kubenswrapper[4728]: I1205 11:24:50.644432 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ee5be811-5e9e-4a19-955b-944a9a457060-metrics-certs\") pod \"speaker-7wkzn\" (UID: \"ee5be811-5e9e-4a19-955b-944a9a457060\") " pod="metallb-system/speaker-7wkzn" Dec 05 11:24:50 crc kubenswrapper[4728]: I1205 11:24:50.645082 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/09963a34-a1db-4854-8a6a-475da8222a7b-metrics-certs\") pod \"controller-f8648f98b-2frdd\" (UID: \"09963a34-a1db-4854-8a6a-475da8222a7b\") " pod="metallb-system/controller-f8648f98b-2frdd" Dec 05 11:24:50 crc kubenswrapper[4728]: I1205 11:24:50.649609 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-fpsrk" event={"ID":"cc6442af-701e-429c-9fe5-93dbe8884f45","Type":"ContainerStarted","Data":"a6fe91f1ef201a148b079a3e7b89c33d296a27882922b82c8f783da690e69c16"} Dec 05 11:24:50 crc kubenswrapper[4728]: I1205 11:24:50.753200 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-9m6g5" Dec 05 11:24:50 crc kubenswrapper[4728]: I1205 11:24:50.863579 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-f8648f98b-2frdd" Dec 05 11:24:51 crc kubenswrapper[4728]: I1205 11:24:51.072661 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-2frdd"] Dec 05 11:24:51 crc kubenswrapper[4728]: I1205 11:24:51.190175 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-9m6g5"] Dec 05 11:24:51 crc kubenswrapper[4728]: W1205 11:24:51.197657 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb85fcb5b_5696_42c4_bb19_1e0d5fa8ff06.slice/crio-402d1480d6b0cd00b965d226e1d2e5a787e4de25d762dddcde948f2a606d250c WatchSource:0}: Error finding container 402d1480d6b0cd00b965d226e1d2e5a787e4de25d762dddcde948f2a606d250c: Status 404 returned error can't find the container with id 402d1480d6b0cd00b965d226e1d2e5a787e4de25d762dddcde948f2a606d250c Dec 05 11:24:51 crc kubenswrapper[4728]: I1205 11:24:51.650374 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/ee5be811-5e9e-4a19-955b-944a9a457060-memberlist\") pod \"speaker-7wkzn\" (UID: \"ee5be811-5e9e-4a19-955b-944a9a457060\") " pod="metallb-system/speaker-7wkzn" Dec 05 11:24:51 crc kubenswrapper[4728]: I1205 11:24:51.657995 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/ee5be811-5e9e-4a19-955b-944a9a457060-memberlist\") pod \"speaker-7wkzn\" (UID: \"ee5be811-5e9e-4a19-955b-944a9a457060\") " pod="metallb-system/speaker-7wkzn" Dec 05 11:24:51 crc kubenswrapper[4728]: I1205 11:24:51.665333 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-2frdd" 
event={"ID":"09963a34-a1db-4854-8a6a-475da8222a7b","Type":"ContainerStarted","Data":"db42390907fcb5bf2757092184f9c42231e187a2fca9770c75db825b7ab02508"} Dec 05 11:24:51 crc kubenswrapper[4728]: I1205 11:24:51.665470 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-2frdd" event={"ID":"09963a34-a1db-4854-8a6a-475da8222a7b","Type":"ContainerStarted","Data":"f3f71216bef05066de081851fe7043651c0851142c3ee3ebcf129b0badad266a"} Dec 05 11:24:51 crc kubenswrapper[4728]: I1205 11:24:51.665506 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-f8648f98b-2frdd" Dec 05 11:24:51 crc kubenswrapper[4728]: I1205 11:24:51.665560 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-2frdd" event={"ID":"09963a34-a1db-4854-8a6a-475da8222a7b","Type":"ContainerStarted","Data":"d086f3a468c6ad7f2a442a4b4a9bf8d4129d8457405135fc419ee154db09dbf5"} Dec 05 11:24:51 crc kubenswrapper[4728]: I1205 11:24:51.666675 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-9m6g5" event={"ID":"b85fcb5b-5696-42c4-bb19-1e0d5fa8ff06","Type":"ContainerStarted","Data":"402d1480d6b0cd00b965d226e1d2e5a787e4de25d762dddcde948f2a606d250c"} Dec 05 11:24:51 crc kubenswrapper[4728]: I1205 11:24:51.751084 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-7wkzn" Dec 05 11:24:51 crc kubenswrapper[4728]: W1205 11:24:51.772113 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podee5be811_5e9e_4a19_955b_944a9a457060.slice/crio-a1a3bd6aa870caf5d540f005afd23217b2d78fdba04f9f6c15bd03ff59405ea9 WatchSource:0}: Error finding container a1a3bd6aa870caf5d540f005afd23217b2d78fdba04f9f6c15bd03ff59405ea9: Status 404 returned error can't find the container with id a1a3bd6aa870caf5d540f005afd23217b2d78fdba04f9f6c15bd03ff59405ea9 Dec 05 11:24:52 crc kubenswrapper[4728]: I1205 11:24:52.674746 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-7wkzn" event={"ID":"ee5be811-5e9e-4a19-955b-944a9a457060","Type":"ContainerStarted","Data":"d378aa7a4df4b1d6b1d4eb8c9662c0eb324545b6cf34ca8c045bb123c0fc87f9"} Dec 05 11:24:52 crc kubenswrapper[4728]: I1205 11:24:52.675138 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-7wkzn" event={"ID":"ee5be811-5e9e-4a19-955b-944a9a457060","Type":"ContainerStarted","Data":"bfbcd167df2537b4aceef95b3525629194f100c338b6fb839acb92745bc0e755"} Dec 05 11:24:52 crc kubenswrapper[4728]: I1205 11:24:52.675156 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-7wkzn" event={"ID":"ee5be811-5e9e-4a19-955b-944a9a457060","Type":"ContainerStarted","Data":"a1a3bd6aa870caf5d540f005afd23217b2d78fdba04f9f6c15bd03ff59405ea9"} Dec 05 11:24:52 crc kubenswrapper[4728]: I1205 11:24:52.675381 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-7wkzn" Dec 05 11:24:52 crc kubenswrapper[4728]: I1205 11:24:52.698470 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-7wkzn" podStartSLOduration=3.698443748 podStartE2EDuration="3.698443748s" podCreationTimestamp="2025-12-05 11:24:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:24:52.696832345 +0000 UTC 
m=+1026.838955048" watchObservedRunningTime="2025-12-05 11:24:52.698443748 +0000 UTC m=+1026.840566451" Dec 05 11:24:52 crc kubenswrapper[4728]: I1205 11:24:52.700241 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-f8648f98b-2frdd" podStartSLOduration=3.700232476 podStartE2EDuration="3.700232476s" podCreationTimestamp="2025-12-05 11:24:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:24:51.689528267 +0000 UTC m=+1025.831650990" watchObservedRunningTime="2025-12-05 11:24:52.700232476 +0000 UTC m=+1026.842355169" Dec 05 11:24:55 crc kubenswrapper[4728]: I1205 11:24:55.701978 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 11:24:55 crc kubenswrapper[4728]: I1205 11:24:55.702325 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 11:24:55 crc kubenswrapper[4728]: I1205 11:24:55.702377 4728 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" Dec 05 11:24:55 crc kubenswrapper[4728]: I1205 11:24:55.703123 4728 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6bf852478fb7f0ca84e27842bbcba629a54bb2c604960749055643fd058725b9"} pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 11:24:55 crc kubenswrapper[4728]: I1205 11:24:55.703182 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" containerID="cri-o://6bf852478fb7f0ca84e27842bbcba629a54bb2c604960749055643fd058725b9" gracePeriod=600 Dec 05 11:24:56 crc kubenswrapper[4728]: I1205 11:24:56.714830 4728 generic.go:334] "Generic (PLEG): container finished" podID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerID="6bf852478fb7f0ca84e27842bbcba629a54bb2c604960749055643fd058725b9" exitCode=0 Dec 05 11:24:56 crc kubenswrapper[4728]: I1205 11:24:56.715063 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" event={"ID":"95bfa60b-fcb6-4519-abc5-c25fea50921d","Type":"ContainerDied","Data":"6bf852478fb7f0ca84e27842bbcba629a54bb2c604960749055643fd058725b9"} Dec 05 11:24:56 crc kubenswrapper[4728]: I1205 11:24:56.715210 4728 scope.go:117] "RemoveContainer" containerID="326e3eadf7eef59824ca257b48d5202ca29f557deff0b73b1ca60a1f7865a5b8" Dec 05 11:24:57 crc kubenswrapper[4728]: I1205 11:24:57.721353 4728 generic.go:334] "Generic (PLEG): container finished" podID="cc6442af-701e-429c-9fe5-93dbe8884f45" containerID="82c5bd459ce33844c45293aa2349de09c90b0e5880f4aa63abc884b0a908d462" exitCode=0 Dec 05 11:24:57 crc kubenswrapper[4728]: I1205 11:24:57.721641 4728 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-fpsrk" event={"ID":"cc6442af-701e-429c-9fe5-93dbe8884f45","Type":"ContainerDied","Data":"82c5bd459ce33844c45293aa2349de09c90b0e5880f4aa63abc884b0a908d462"} Dec 05 11:24:57 crc kubenswrapper[4728]: I1205 11:24:57.724779 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" event={"ID":"95bfa60b-fcb6-4519-abc5-c25fea50921d","Type":"ContainerStarted","Data":"d8efdde93e15953f6b7d53c9af0274517d1b20f2c87e219dbc82f0145e651995"} Dec 05 11:24:57 crc kubenswrapper[4728]: I1205 11:24:57.727133 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-9m6g5" event={"ID":"b85fcb5b-5696-42c4-bb19-1e0d5fa8ff06","Type":"ContainerStarted","Data":"5a0b7ae9292293fd12cf4d68c5269876b7fffea7b28e0a45bb8db52245740e3b"} Dec 05 11:24:57 crc kubenswrapper[4728]: I1205 11:24:57.727329 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-9m6g5" Dec 05 11:24:57 crc kubenswrapper[4728]: I1205 11:24:57.769642 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-9m6g5" podStartSLOduration=2.805064936 podStartE2EDuration="8.769620236s" podCreationTimestamp="2025-12-05 11:24:49 +0000 UTC" firstStartedPulling="2025-12-05 11:24:51.200698017 +0000 UTC m=+1025.342820710" lastFinishedPulling="2025-12-05 11:24:57.165253307 +0000 UTC m=+1031.307376010" observedRunningTime="2025-12-05 11:24:57.763587234 +0000 UTC m=+1031.905709947" watchObservedRunningTime="2025-12-05 11:24:57.769620236 +0000 UTC m=+1031.911742939" Dec 05 11:24:58 crc kubenswrapper[4728]: I1205 11:24:58.739640 4728 generic.go:334] "Generic (PLEG): container finished" podID="cc6442af-701e-429c-9fe5-93dbe8884f45" containerID="4d53b2b0fd0d27af8519d5578f3b2263e4f417ddf13628894db29938f830b07d" exitCode=0 Dec 05 11:24:58 crc kubenswrapper[4728]: I1205 11:24:58.739723 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-fpsrk" event={"ID":"cc6442af-701e-429c-9fe5-93dbe8884f45","Type":"ContainerDied","Data":"4d53b2b0fd0d27af8519d5578f3b2263e4f417ddf13628894db29938f830b07d"} Dec 05 11:24:59 crc kubenswrapper[4728]: E1205 11:24:59.515644 4728 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcc6442af_701e_429c_9fe5_93dbe8884f45.slice/crio-60a8b561bc0c9dc518556f13fbfb19f74ba655e357e394578eab50964590a728.scope\": RecentStats: unable to find data in memory cache]" Dec 05 11:24:59 crc kubenswrapper[4728]: I1205 11:24:59.750737 4728 generic.go:334] "Generic (PLEG): container finished" podID="cc6442af-701e-429c-9fe5-93dbe8884f45" containerID="60a8b561bc0c9dc518556f13fbfb19f74ba655e357e394578eab50964590a728" exitCode=0 Dec 05 11:24:59 crc kubenswrapper[4728]: I1205 11:24:59.750823 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-fpsrk" event={"ID":"cc6442af-701e-429c-9fe5-93dbe8884f45","Type":"ContainerDied","Data":"60a8b561bc0c9dc518556f13fbfb19f74ba655e357e394578eab50964590a728"} Dec 05 11:25:00 crc kubenswrapper[4728]: I1205 11:25:00.762562 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-fpsrk" 
event={"ID":"cc6442af-701e-429c-9fe5-93dbe8884f45","Type":"ContainerStarted","Data":"f6cc1b89239f1111b6fb1f880cba8719f8942d8b6af12a054982cd8cfc2d8494"} Dec 05 11:25:00 crc kubenswrapper[4728]: I1205 11:25:00.762955 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-fpsrk" event={"ID":"cc6442af-701e-429c-9fe5-93dbe8884f45","Type":"ContainerStarted","Data":"fe0675b096241c6606c2e1ada124bf6ce86ccfed75d5d3ec6beb6bab24a408ac"} Dec 05 11:25:00 crc kubenswrapper[4728]: I1205 11:25:00.762966 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-fpsrk" event={"ID":"cc6442af-701e-429c-9fe5-93dbe8884f45","Type":"ContainerStarted","Data":"a4eb9fb137ee45ed8fe113793179821fcf37c74983b56ee24ae47d12064c84d5"} Dec 05 11:25:00 crc kubenswrapper[4728]: I1205 11:25:00.762977 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-fpsrk" event={"ID":"cc6442af-701e-429c-9fe5-93dbe8884f45","Type":"ContainerStarted","Data":"e0b3ca37e27bdb257d9a1a4e1c4bc74353381b3c6c36a47166fdc02d235cb3f2"} Dec 05 11:25:00 crc kubenswrapper[4728]: I1205 11:25:00.762985 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-fpsrk" event={"ID":"cc6442af-701e-429c-9fe5-93dbe8884f45","Type":"ContainerStarted","Data":"5bafe0c1579e092cd3142482c45f7b4611fbf5fe9a7afb18bf9bd925ce72a2f9"} Dec 05 11:25:01 crc kubenswrapper[4728]: I1205 11:25:01.757112 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-7wkzn" Dec 05 11:25:01 crc kubenswrapper[4728]: I1205 11:25:01.769961 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-fpsrk" event={"ID":"cc6442af-701e-429c-9fe5-93dbe8884f45","Type":"ContainerStarted","Data":"344d2fb6f085bf62cf3cd54dcd31a50f0a0ef43aa28dd6dc655961118ba2bc1c"} Dec 05 11:25:01 crc kubenswrapper[4728]: I1205 11:25:01.770706 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-fpsrk" Dec 05 11:25:05 crc kubenswrapper[4728]: I1205 11:25:05.135997 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-fpsrk" Dec 05 11:25:05 crc kubenswrapper[4728]: I1205 11:25:05.175379 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-fpsrk" Dec 05 11:25:05 crc kubenswrapper[4728]: I1205 11:25:05.203388 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-fpsrk" podStartSLOduration=9.329109468 podStartE2EDuration="16.203365472s" podCreationTimestamp="2025-12-05 11:24:49 +0000 UTC" firstStartedPulling="2025-12-05 11:24:50.266306899 +0000 UTC m=+1024.408429592" lastFinishedPulling="2025-12-05 11:24:57.140562903 +0000 UTC m=+1031.282685596" observedRunningTime="2025-12-05 11:25:01.799321485 +0000 UTC m=+1035.941444188" watchObservedRunningTime="2025-12-05 11:25:05.203365472 +0000 UTC m=+1039.345488175" Dec 05 11:25:08 crc kubenswrapper[4728]: I1205 11:25:08.203191 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-zcbwh"] Dec 05 11:25:08 crc kubenswrapper[4728]: I1205 11:25:08.205283 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-zcbwh" Dec 05 11:25:08 crc kubenswrapper[4728]: I1205 11:25:08.207950 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-vjbsn" Dec 05 11:25:08 crc kubenswrapper[4728]: I1205 11:25:08.208671 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Dec 05 11:25:08 crc kubenswrapper[4728]: I1205 11:25:08.208927 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Dec 05 11:25:08 crc kubenswrapper[4728]: I1205 11:25:08.210679 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-zcbwh"] Dec 05 11:25:08 crc kubenswrapper[4728]: I1205 11:25:08.294771 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pfnht\" (UniqueName: \"kubernetes.io/projected/b86e08a1-03ce-42b0-aef8-978b71bbf504-kube-api-access-pfnht\") pod \"openstack-operator-index-zcbwh\" (UID: \"b86e08a1-03ce-42b0-aef8-978b71bbf504\") " pod="openstack-operators/openstack-operator-index-zcbwh" Dec 05 11:25:08 crc kubenswrapper[4728]: I1205 11:25:08.396760 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pfnht\" (UniqueName: \"kubernetes.io/projected/b86e08a1-03ce-42b0-aef8-978b71bbf504-kube-api-access-pfnht\") pod \"openstack-operator-index-zcbwh\" (UID: \"b86e08a1-03ce-42b0-aef8-978b71bbf504\") " pod="openstack-operators/openstack-operator-index-zcbwh" Dec 05 11:25:08 crc kubenswrapper[4728]: I1205 11:25:08.417111 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pfnht\" (UniqueName: \"kubernetes.io/projected/b86e08a1-03ce-42b0-aef8-978b71bbf504-kube-api-access-pfnht\") pod \"openstack-operator-index-zcbwh\" (UID: \"b86e08a1-03ce-42b0-aef8-978b71bbf504\") " pod="openstack-operators/openstack-operator-index-zcbwh" Dec 05 11:25:08 crc kubenswrapper[4728]: I1205 11:25:08.534258 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-zcbwh" Dec 05 11:25:08 crc kubenswrapper[4728]: I1205 11:25:08.977401 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-zcbwh"] Dec 05 11:25:08 crc kubenswrapper[4728]: W1205 11:25:08.990173 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb86e08a1_03ce_42b0_aef8_978b71bbf504.slice/crio-3f94f929cc538f2b3edfb04c59a8843070df042a16ba824ed846cda81b81210f WatchSource:0}: Error finding container 3f94f929cc538f2b3edfb04c59a8843070df042a16ba824ed846cda81b81210f: Status 404 returned error can't find the container with id 3f94f929cc538f2b3edfb04c59a8843070df042a16ba824ed846cda81b81210f Dec 05 11:25:09 crc kubenswrapper[4728]: I1205 11:25:09.825141 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-zcbwh" event={"ID":"b86e08a1-03ce-42b0-aef8-978b71bbf504","Type":"ContainerStarted","Data":"3f94f929cc538f2b3edfb04c59a8843070df042a16ba824ed846cda81b81210f"} Dec 05 11:25:10 crc kubenswrapper[4728]: I1205 11:25:10.138996 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-fpsrk" Dec 05 11:25:10 crc kubenswrapper[4728]: I1205 11:25:10.764167 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-9m6g5" Dec 05 11:25:10 crc kubenswrapper[4728]: I1205 11:25:10.867898 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-f8648f98b-2frdd" Dec 05 11:25:11 crc kubenswrapper[4728]: I1205 11:25:11.856920 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-zcbwh" event={"ID":"b86e08a1-03ce-42b0-aef8-978b71bbf504","Type":"ContainerStarted","Data":"c0c8468eb635df728fa9f1be409e1d7a3320984a2fe8fb685de170e4354b6d37"} Dec 05 11:25:11 crc kubenswrapper[4728]: I1205 11:25:11.879713 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-zcbwh" podStartSLOduration=1.444863168 podStartE2EDuration="3.879685411s" podCreationTimestamp="2025-12-05 11:25:08 +0000 UTC" firstStartedPulling="2025-12-05 11:25:08.994571774 +0000 UTC m=+1043.136694477" lastFinishedPulling="2025-12-05 11:25:11.429394027 +0000 UTC m=+1045.571516720" observedRunningTime="2025-12-05 11:25:11.877094051 +0000 UTC m=+1046.019216854" watchObservedRunningTime="2025-12-05 11:25:11.879685411 +0000 UTC m=+1046.021808114" Dec 05 11:25:13 crc kubenswrapper[4728]: I1205 11:25:13.382919 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-zcbwh"] Dec 05 11:25:13 crc kubenswrapper[4728]: I1205 11:25:13.868740 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-zcbwh" podUID="b86e08a1-03ce-42b0-aef8-978b71bbf504" containerName="registry-server" containerID="cri-o://c0c8468eb635df728fa9f1be409e1d7a3320984a2fe8fb685de170e4354b6d37" gracePeriod=2 Dec 05 11:25:13 crc kubenswrapper[4728]: I1205 11:25:13.991044 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-lvhsr"] Dec 05 11:25:13 crc kubenswrapper[4728]: I1205 11:25:13.992068 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-lvhsr" Dec 05 11:25:13 crc kubenswrapper[4728]: I1205 11:25:13.999079 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-95fqm\" (UniqueName: \"kubernetes.io/projected/bcbc2f72-6b48-4afd-9b1a-cd9d4a32692d-kube-api-access-95fqm\") pod \"openstack-operator-index-lvhsr\" (UID: \"bcbc2f72-6b48-4afd-9b1a-cd9d4a32692d\") " pod="openstack-operators/openstack-operator-index-lvhsr" Dec 05 11:25:14 crc kubenswrapper[4728]: I1205 11:25:14.012511 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-lvhsr"] Dec 05 11:25:14 crc kubenswrapper[4728]: I1205 11:25:14.100321 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-95fqm\" (UniqueName: \"kubernetes.io/projected/bcbc2f72-6b48-4afd-9b1a-cd9d4a32692d-kube-api-access-95fqm\") pod \"openstack-operator-index-lvhsr\" (UID: \"bcbc2f72-6b48-4afd-9b1a-cd9d4a32692d\") " pod="openstack-operators/openstack-operator-index-lvhsr" Dec 05 11:25:14 crc kubenswrapper[4728]: I1205 11:25:14.127432 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-95fqm\" (UniqueName: \"kubernetes.io/projected/bcbc2f72-6b48-4afd-9b1a-cd9d4a32692d-kube-api-access-95fqm\") pod \"openstack-operator-index-lvhsr\" (UID: \"bcbc2f72-6b48-4afd-9b1a-cd9d4a32692d\") " pod="openstack-operators/openstack-operator-index-lvhsr" Dec 05 11:25:14 crc kubenswrapper[4728]: I1205 11:25:14.257102 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-zcbwh" Dec 05 11:25:14 crc kubenswrapper[4728]: I1205 11:25:14.302896 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pfnht\" (UniqueName: \"kubernetes.io/projected/b86e08a1-03ce-42b0-aef8-978b71bbf504-kube-api-access-pfnht\") pod \"b86e08a1-03ce-42b0-aef8-978b71bbf504\" (UID: \"b86e08a1-03ce-42b0-aef8-978b71bbf504\") " Dec 05 11:25:14 crc kubenswrapper[4728]: I1205 11:25:14.317073 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b86e08a1-03ce-42b0-aef8-978b71bbf504-kube-api-access-pfnht" (OuterVolumeSpecName: "kube-api-access-pfnht") pod "b86e08a1-03ce-42b0-aef8-978b71bbf504" (UID: "b86e08a1-03ce-42b0-aef8-978b71bbf504"). InnerVolumeSpecName "kube-api-access-pfnht". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:25:14 crc kubenswrapper[4728]: I1205 11:25:14.321354 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-lvhsr" Dec 05 11:25:14 crc kubenswrapper[4728]: I1205 11:25:14.404281 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pfnht\" (UniqueName: \"kubernetes.io/projected/b86e08a1-03ce-42b0-aef8-978b71bbf504-kube-api-access-pfnht\") on node \"crc\" DevicePath \"\"" Dec 05 11:25:14 crc kubenswrapper[4728]: I1205 11:25:14.760354 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-lvhsr"] Dec 05 11:25:14 crc kubenswrapper[4728]: W1205 11:25:14.766323 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbcbc2f72_6b48_4afd_9b1a_cd9d4a32692d.slice/crio-5c5bcf5c01204553b1718ea6cb57cfaefc70bc970e58e3463245a5e05cc30452 WatchSource:0}: Error finding container 5c5bcf5c01204553b1718ea6cb57cfaefc70bc970e58e3463245a5e05cc30452: Status 404 returned error can't find the container with id 5c5bcf5c01204553b1718ea6cb57cfaefc70bc970e58e3463245a5e05cc30452 Dec 05 11:25:14 crc kubenswrapper[4728]: I1205 11:25:14.876491 4728 generic.go:334] "Generic (PLEG): container finished" podID="b86e08a1-03ce-42b0-aef8-978b71bbf504" containerID="c0c8468eb635df728fa9f1be409e1d7a3320984a2fe8fb685de170e4354b6d37" exitCode=0 Dec 05 11:25:14 crc kubenswrapper[4728]: I1205 11:25:14.876573 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-zcbwh" event={"ID":"b86e08a1-03ce-42b0-aef8-978b71bbf504","Type":"ContainerDied","Data":"c0c8468eb635df728fa9f1be409e1d7a3320984a2fe8fb685de170e4354b6d37"} Dec 05 11:25:14 crc kubenswrapper[4728]: I1205 11:25:14.876606 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-zcbwh" event={"ID":"b86e08a1-03ce-42b0-aef8-978b71bbf504","Type":"ContainerDied","Data":"3f94f929cc538f2b3edfb04c59a8843070df042a16ba824ed846cda81b81210f"} Dec 05 11:25:14 crc kubenswrapper[4728]: I1205 11:25:14.876623 4728 scope.go:117] "RemoveContainer" containerID="c0c8468eb635df728fa9f1be409e1d7a3320984a2fe8fb685de170e4354b6d37" Dec 05 11:25:14 crc kubenswrapper[4728]: I1205 11:25:14.876574 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-zcbwh" Dec 05 11:25:14 crc kubenswrapper[4728]: I1205 11:25:14.878394 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-lvhsr" event={"ID":"bcbc2f72-6b48-4afd-9b1a-cd9d4a32692d","Type":"ContainerStarted","Data":"5c5bcf5c01204553b1718ea6cb57cfaefc70bc970e58e3463245a5e05cc30452"} Dec 05 11:25:14 crc kubenswrapper[4728]: I1205 11:25:14.891681 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-zcbwh"] Dec 05 11:25:14 crc kubenswrapper[4728]: I1205 11:25:14.893175 4728 scope.go:117] "RemoveContainer" containerID="c0c8468eb635df728fa9f1be409e1d7a3320984a2fe8fb685de170e4354b6d37" Dec 05 11:25:14 crc kubenswrapper[4728]: E1205 11:25:14.893569 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c0c8468eb635df728fa9f1be409e1d7a3320984a2fe8fb685de170e4354b6d37\": container with ID starting with c0c8468eb635df728fa9f1be409e1d7a3320984a2fe8fb685de170e4354b6d37 not found: ID does not exist" containerID="c0c8468eb635df728fa9f1be409e1d7a3320984a2fe8fb685de170e4354b6d37" Dec 05 11:25:14 crc kubenswrapper[4728]: I1205 11:25:14.893611 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c0c8468eb635df728fa9f1be409e1d7a3320984a2fe8fb685de170e4354b6d37"} err="failed to get container status \"c0c8468eb635df728fa9f1be409e1d7a3320984a2fe8fb685de170e4354b6d37\": rpc error: code = NotFound desc = could not find container \"c0c8468eb635df728fa9f1be409e1d7a3320984a2fe8fb685de170e4354b6d37\": container with ID starting with c0c8468eb635df728fa9f1be409e1d7a3320984a2fe8fb685de170e4354b6d37 not found: ID does not exist" Dec 05 11:25:14 crc kubenswrapper[4728]: I1205 11:25:14.896740 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-zcbwh"] Dec 05 11:25:15 crc kubenswrapper[4728]: I1205 11:25:15.887553 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-lvhsr" event={"ID":"bcbc2f72-6b48-4afd-9b1a-cd9d4a32692d","Type":"ContainerStarted","Data":"6bf5c3c05a09f1b2cb40a722d563843cc5e0945403885319cee9947ac3ae5d0c"} Dec 05 11:25:15 crc kubenswrapper[4728]: I1205 11:25:15.910787 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-lvhsr" podStartSLOduration=2.866919127 podStartE2EDuration="2.910759057s" podCreationTimestamp="2025-12-05 11:25:13 +0000 UTC" firstStartedPulling="2025-12-05 11:25:14.769886564 +0000 UTC m=+1048.912009257" lastFinishedPulling="2025-12-05 11:25:14.813726494 +0000 UTC m=+1048.955849187" observedRunningTime="2025-12-05 11:25:15.906937444 +0000 UTC m=+1050.049060137" watchObservedRunningTime="2025-12-05 11:25:15.910759057 +0000 UTC m=+1050.052881760" Dec 05 11:25:16 crc kubenswrapper[4728]: I1205 11:25:16.389182 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b86e08a1-03ce-42b0-aef8-978b71bbf504" path="/var/lib/kubelet/pods/b86e08a1-03ce-42b0-aef8-978b71bbf504/volumes" Dec 05 11:25:24 crc kubenswrapper[4728]: I1205 11:25:24.322110 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-lvhsr" Dec 05 11:25:24 crc kubenswrapper[4728]: I1205 11:25:24.324816 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack-operators/openstack-operator-index-lvhsr" Dec 05 11:25:24 crc kubenswrapper[4728]: I1205 11:25:24.364580 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-lvhsr" Dec 05 11:25:24 crc kubenswrapper[4728]: I1205 11:25:24.979716 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-lvhsr" Dec 05 11:25:38 crc kubenswrapper[4728]: I1205 11:25:38.045248 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/e48d19870fbb270f90e07dce4d9bad603b7d4900857291efd849a97a2e5tx2p"] Dec 05 11:25:38 crc kubenswrapper[4728]: E1205 11:25:38.047330 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b86e08a1-03ce-42b0-aef8-978b71bbf504" containerName="registry-server" Dec 05 11:25:38 crc kubenswrapper[4728]: I1205 11:25:38.047365 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="b86e08a1-03ce-42b0-aef8-978b71bbf504" containerName="registry-server" Dec 05 11:25:38 crc kubenswrapper[4728]: I1205 11:25:38.047557 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="b86e08a1-03ce-42b0-aef8-978b71bbf504" containerName="registry-server" Dec 05 11:25:38 crc kubenswrapper[4728]: I1205 11:25:38.048988 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/e48d19870fbb270f90e07dce4d9bad603b7d4900857291efd849a97a2e5tx2p" Dec 05 11:25:38 crc kubenswrapper[4728]: I1205 11:25:38.052050 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-5l5gj" Dec 05 11:25:38 crc kubenswrapper[4728]: I1205 11:25:38.056581 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/e48d19870fbb270f90e07dce4d9bad603b7d4900857291efd849a97a2e5tx2p"] Dec 05 11:25:38 crc kubenswrapper[4728]: I1205 11:25:38.143038 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e0dc12b8-8d15-4ef7-a8f9-985442001a82-util\") pod \"e48d19870fbb270f90e07dce4d9bad603b7d4900857291efd849a97a2e5tx2p\" (UID: \"e0dc12b8-8d15-4ef7-a8f9-985442001a82\") " pod="openstack-operators/e48d19870fbb270f90e07dce4d9bad603b7d4900857291efd849a97a2e5tx2p" Dec 05 11:25:38 crc kubenswrapper[4728]: I1205 11:25:38.143191 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d69sx\" (UniqueName: \"kubernetes.io/projected/e0dc12b8-8d15-4ef7-a8f9-985442001a82-kube-api-access-d69sx\") pod \"e48d19870fbb270f90e07dce4d9bad603b7d4900857291efd849a97a2e5tx2p\" (UID: \"e0dc12b8-8d15-4ef7-a8f9-985442001a82\") " pod="openstack-operators/e48d19870fbb270f90e07dce4d9bad603b7d4900857291efd849a97a2e5tx2p" Dec 05 11:25:38 crc kubenswrapper[4728]: I1205 11:25:38.143262 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e0dc12b8-8d15-4ef7-a8f9-985442001a82-bundle\") pod \"e48d19870fbb270f90e07dce4d9bad603b7d4900857291efd849a97a2e5tx2p\" (UID: \"e0dc12b8-8d15-4ef7-a8f9-985442001a82\") " pod="openstack-operators/e48d19870fbb270f90e07dce4d9bad603b7d4900857291efd849a97a2e5tx2p" Dec 05 11:25:38 crc kubenswrapper[4728]: I1205 11:25:38.244290 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e0dc12b8-8d15-4ef7-a8f9-985442001a82-util\") pod 
\"e48d19870fbb270f90e07dce4d9bad603b7d4900857291efd849a97a2e5tx2p\" (UID: \"e0dc12b8-8d15-4ef7-a8f9-985442001a82\") " pod="openstack-operators/e48d19870fbb270f90e07dce4d9bad603b7d4900857291efd849a97a2e5tx2p" Dec 05 11:25:38 crc kubenswrapper[4728]: I1205 11:25:38.244387 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d69sx\" (UniqueName: \"kubernetes.io/projected/e0dc12b8-8d15-4ef7-a8f9-985442001a82-kube-api-access-d69sx\") pod \"e48d19870fbb270f90e07dce4d9bad603b7d4900857291efd849a97a2e5tx2p\" (UID: \"e0dc12b8-8d15-4ef7-a8f9-985442001a82\") " pod="openstack-operators/e48d19870fbb270f90e07dce4d9bad603b7d4900857291efd849a97a2e5tx2p" Dec 05 11:25:38 crc kubenswrapper[4728]: I1205 11:25:38.244420 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e0dc12b8-8d15-4ef7-a8f9-985442001a82-bundle\") pod \"e48d19870fbb270f90e07dce4d9bad603b7d4900857291efd849a97a2e5tx2p\" (UID: \"e0dc12b8-8d15-4ef7-a8f9-985442001a82\") " pod="openstack-operators/e48d19870fbb270f90e07dce4d9bad603b7d4900857291efd849a97a2e5tx2p" Dec 05 11:25:38 crc kubenswrapper[4728]: I1205 11:25:38.245141 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e0dc12b8-8d15-4ef7-a8f9-985442001a82-bundle\") pod \"e48d19870fbb270f90e07dce4d9bad603b7d4900857291efd849a97a2e5tx2p\" (UID: \"e0dc12b8-8d15-4ef7-a8f9-985442001a82\") " pod="openstack-operators/e48d19870fbb270f90e07dce4d9bad603b7d4900857291efd849a97a2e5tx2p" Dec 05 11:25:38 crc kubenswrapper[4728]: I1205 11:25:38.245188 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e0dc12b8-8d15-4ef7-a8f9-985442001a82-util\") pod \"e48d19870fbb270f90e07dce4d9bad603b7d4900857291efd849a97a2e5tx2p\" (UID: \"e0dc12b8-8d15-4ef7-a8f9-985442001a82\") " pod="openstack-operators/e48d19870fbb270f90e07dce4d9bad603b7d4900857291efd849a97a2e5tx2p" Dec 05 11:25:38 crc kubenswrapper[4728]: I1205 11:25:38.267577 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d69sx\" (UniqueName: \"kubernetes.io/projected/e0dc12b8-8d15-4ef7-a8f9-985442001a82-kube-api-access-d69sx\") pod \"e48d19870fbb270f90e07dce4d9bad603b7d4900857291efd849a97a2e5tx2p\" (UID: \"e0dc12b8-8d15-4ef7-a8f9-985442001a82\") " pod="openstack-operators/e48d19870fbb270f90e07dce4d9bad603b7d4900857291efd849a97a2e5tx2p" Dec 05 11:25:38 crc kubenswrapper[4728]: I1205 11:25:38.371060 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/e48d19870fbb270f90e07dce4d9bad603b7d4900857291efd849a97a2e5tx2p" Dec 05 11:25:38 crc kubenswrapper[4728]: I1205 11:25:38.832052 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/e48d19870fbb270f90e07dce4d9bad603b7d4900857291efd849a97a2e5tx2p"] Dec 05 11:25:39 crc kubenswrapper[4728]: I1205 11:25:39.056699 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/e48d19870fbb270f90e07dce4d9bad603b7d4900857291efd849a97a2e5tx2p" event={"ID":"e0dc12b8-8d15-4ef7-a8f9-985442001a82","Type":"ContainerStarted","Data":"b16ac8f84ec9ac7e6ad0051b173ae2592d40ef27423cb501678a482b864e7ef6"} Dec 05 11:25:39 crc kubenswrapper[4728]: I1205 11:25:39.056856 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/e48d19870fbb270f90e07dce4d9bad603b7d4900857291efd849a97a2e5tx2p" event={"ID":"e0dc12b8-8d15-4ef7-a8f9-985442001a82","Type":"ContainerStarted","Data":"a18a5cf45da85dd8d9f911fe836332ba7a7f1e2eed7b15644cbe120eaf78db4c"} Dec 05 11:25:40 crc kubenswrapper[4728]: I1205 11:25:40.066839 4728 generic.go:334] "Generic (PLEG): container finished" podID="e0dc12b8-8d15-4ef7-a8f9-985442001a82" containerID="b16ac8f84ec9ac7e6ad0051b173ae2592d40ef27423cb501678a482b864e7ef6" exitCode=0 Dec 05 11:25:40 crc kubenswrapper[4728]: I1205 11:25:40.066948 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/e48d19870fbb270f90e07dce4d9bad603b7d4900857291efd849a97a2e5tx2p" event={"ID":"e0dc12b8-8d15-4ef7-a8f9-985442001a82","Type":"ContainerDied","Data":"b16ac8f84ec9ac7e6ad0051b173ae2592d40ef27423cb501678a482b864e7ef6"} Dec 05 11:25:41 crc kubenswrapper[4728]: I1205 11:25:41.080471 4728 generic.go:334] "Generic (PLEG): container finished" podID="e0dc12b8-8d15-4ef7-a8f9-985442001a82" containerID="d27ebc11f8b8310db29cce171ea8095a3cc06ccd3dcb790205e02a8749a003a4" exitCode=0 Dec 05 11:25:41 crc kubenswrapper[4728]: I1205 11:25:41.080595 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/e48d19870fbb270f90e07dce4d9bad603b7d4900857291efd849a97a2e5tx2p" event={"ID":"e0dc12b8-8d15-4ef7-a8f9-985442001a82","Type":"ContainerDied","Data":"d27ebc11f8b8310db29cce171ea8095a3cc06ccd3dcb790205e02a8749a003a4"} Dec 05 11:25:42 crc kubenswrapper[4728]: I1205 11:25:42.092270 4728 generic.go:334] "Generic (PLEG): container finished" podID="e0dc12b8-8d15-4ef7-a8f9-985442001a82" containerID="64182f3cd49c38ed8f4b02c9ed879c633ea99386148a6def81e83681b840cc2f" exitCode=0 Dec 05 11:25:42 crc kubenswrapper[4728]: I1205 11:25:42.092382 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/e48d19870fbb270f90e07dce4d9bad603b7d4900857291efd849a97a2e5tx2p" event={"ID":"e0dc12b8-8d15-4ef7-a8f9-985442001a82","Type":"ContainerDied","Data":"64182f3cd49c38ed8f4b02c9ed879c633ea99386148a6def81e83681b840cc2f"} Dec 05 11:25:43 crc kubenswrapper[4728]: I1205 11:25:43.366035 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/e48d19870fbb270f90e07dce4d9bad603b7d4900857291efd849a97a2e5tx2p" Dec 05 11:25:43 crc kubenswrapper[4728]: I1205 11:25:43.433021 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d69sx\" (UniqueName: \"kubernetes.io/projected/e0dc12b8-8d15-4ef7-a8f9-985442001a82-kube-api-access-d69sx\") pod \"e0dc12b8-8d15-4ef7-a8f9-985442001a82\" (UID: \"e0dc12b8-8d15-4ef7-a8f9-985442001a82\") " Dec 05 11:25:43 crc kubenswrapper[4728]: I1205 11:25:43.434237 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e0dc12b8-8d15-4ef7-a8f9-985442001a82-util\") pod \"e0dc12b8-8d15-4ef7-a8f9-985442001a82\" (UID: \"e0dc12b8-8d15-4ef7-a8f9-985442001a82\") " Dec 05 11:25:43 crc kubenswrapper[4728]: I1205 11:25:43.434350 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e0dc12b8-8d15-4ef7-a8f9-985442001a82-bundle\") pod \"e0dc12b8-8d15-4ef7-a8f9-985442001a82\" (UID: \"e0dc12b8-8d15-4ef7-a8f9-985442001a82\") " Dec 05 11:25:43 crc kubenswrapper[4728]: I1205 11:25:43.435186 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e0dc12b8-8d15-4ef7-a8f9-985442001a82-bundle" (OuterVolumeSpecName: "bundle") pod "e0dc12b8-8d15-4ef7-a8f9-985442001a82" (UID: "e0dc12b8-8d15-4ef7-a8f9-985442001a82"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:25:43 crc kubenswrapper[4728]: I1205 11:25:43.439164 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e0dc12b8-8d15-4ef7-a8f9-985442001a82-kube-api-access-d69sx" (OuterVolumeSpecName: "kube-api-access-d69sx") pod "e0dc12b8-8d15-4ef7-a8f9-985442001a82" (UID: "e0dc12b8-8d15-4ef7-a8f9-985442001a82"). InnerVolumeSpecName "kube-api-access-d69sx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:25:43 crc kubenswrapper[4728]: I1205 11:25:43.448619 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e0dc12b8-8d15-4ef7-a8f9-985442001a82-util" (OuterVolumeSpecName: "util") pod "e0dc12b8-8d15-4ef7-a8f9-985442001a82" (UID: "e0dc12b8-8d15-4ef7-a8f9-985442001a82"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:25:43 crc kubenswrapper[4728]: I1205 11:25:43.536407 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d69sx\" (UniqueName: \"kubernetes.io/projected/e0dc12b8-8d15-4ef7-a8f9-985442001a82-kube-api-access-d69sx\") on node \"crc\" DevicePath \"\"" Dec 05 11:25:43 crc kubenswrapper[4728]: I1205 11:25:43.536445 4728 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e0dc12b8-8d15-4ef7-a8f9-985442001a82-util\") on node \"crc\" DevicePath \"\"" Dec 05 11:25:43 crc kubenswrapper[4728]: I1205 11:25:43.536457 4728 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e0dc12b8-8d15-4ef7-a8f9-985442001a82-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:25:44 crc kubenswrapper[4728]: I1205 11:25:44.110244 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/e48d19870fbb270f90e07dce4d9bad603b7d4900857291efd849a97a2e5tx2p" event={"ID":"e0dc12b8-8d15-4ef7-a8f9-985442001a82","Type":"ContainerDied","Data":"a18a5cf45da85dd8d9f911fe836332ba7a7f1e2eed7b15644cbe120eaf78db4c"} Dec 05 11:25:44 crc kubenswrapper[4728]: I1205 11:25:44.110317 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a18a5cf45da85dd8d9f911fe836332ba7a7f1e2eed7b15644cbe120eaf78db4c" Dec 05 11:25:44 crc kubenswrapper[4728]: I1205 11:25:44.110846 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/e48d19870fbb270f90e07dce4d9bad603b7d4900857291efd849a97a2e5tx2p" Dec 05 11:25:50 crc kubenswrapper[4728]: I1205 11:25:50.026860 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-6767b55986-t74w7"] Dec 05 11:25:50 crc kubenswrapper[4728]: E1205 11:25:50.027612 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0dc12b8-8d15-4ef7-a8f9-985442001a82" containerName="util" Dec 05 11:25:50 crc kubenswrapper[4728]: I1205 11:25:50.027623 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0dc12b8-8d15-4ef7-a8f9-985442001a82" containerName="util" Dec 05 11:25:50 crc kubenswrapper[4728]: E1205 11:25:50.027639 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0dc12b8-8d15-4ef7-a8f9-985442001a82" containerName="pull" Dec 05 11:25:50 crc kubenswrapper[4728]: I1205 11:25:50.027644 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0dc12b8-8d15-4ef7-a8f9-985442001a82" containerName="pull" Dec 05 11:25:50 crc kubenswrapper[4728]: E1205 11:25:50.027667 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0dc12b8-8d15-4ef7-a8f9-985442001a82" containerName="extract" Dec 05 11:25:50 crc kubenswrapper[4728]: I1205 11:25:50.027674 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0dc12b8-8d15-4ef7-a8f9-985442001a82" containerName="extract" Dec 05 11:25:50 crc kubenswrapper[4728]: I1205 11:25:50.027771 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0dc12b8-8d15-4ef7-a8f9-985442001a82" containerName="extract" Dec 05 11:25:50 crc kubenswrapper[4728]: I1205 11:25:50.028225 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-6767b55986-t74w7" Dec 05 11:25:50 crc kubenswrapper[4728]: I1205 11:25:50.030229 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-hzz8v" Dec 05 11:25:50 crc kubenswrapper[4728]: I1205 11:25:50.062011 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-6767b55986-t74w7"] Dec 05 11:25:50 crc kubenswrapper[4728]: I1205 11:25:50.129012 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p8z79\" (UniqueName: \"kubernetes.io/projected/226ccaf4-1c8b-4a98-a3a6-122629462baa-kube-api-access-p8z79\") pod \"openstack-operator-controller-operator-6767b55986-t74w7\" (UID: \"226ccaf4-1c8b-4a98-a3a6-122629462baa\") " pod="openstack-operators/openstack-operator-controller-operator-6767b55986-t74w7" Dec 05 11:25:50 crc kubenswrapper[4728]: I1205 11:25:50.230270 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p8z79\" (UniqueName: \"kubernetes.io/projected/226ccaf4-1c8b-4a98-a3a6-122629462baa-kube-api-access-p8z79\") pod \"openstack-operator-controller-operator-6767b55986-t74w7\" (UID: \"226ccaf4-1c8b-4a98-a3a6-122629462baa\") " pod="openstack-operators/openstack-operator-controller-operator-6767b55986-t74w7" Dec 05 11:25:50 crc kubenswrapper[4728]: I1205 11:25:50.254705 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p8z79\" (UniqueName: \"kubernetes.io/projected/226ccaf4-1c8b-4a98-a3a6-122629462baa-kube-api-access-p8z79\") pod \"openstack-operator-controller-operator-6767b55986-t74w7\" (UID: \"226ccaf4-1c8b-4a98-a3a6-122629462baa\") " pod="openstack-operators/openstack-operator-controller-operator-6767b55986-t74w7" Dec 05 11:25:50 crc kubenswrapper[4728]: I1205 11:25:50.351683 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-6767b55986-t74w7" Dec 05 11:25:50 crc kubenswrapper[4728]: I1205 11:25:50.576754 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-6767b55986-t74w7"] Dec 05 11:25:50 crc kubenswrapper[4728]: I1205 11:25:50.596069 4728 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 11:25:51 crc kubenswrapper[4728]: I1205 11:25:51.165460 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-6767b55986-t74w7" event={"ID":"226ccaf4-1c8b-4a98-a3a6-122629462baa","Type":"ContainerStarted","Data":"6356c9b7e78ec7f04cca59003127533dc44dd83ef8c55c8e95ef59ef0a76d67b"} Dec 05 11:25:55 crc kubenswrapper[4728]: I1205 11:25:55.193144 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-6767b55986-t74w7" event={"ID":"226ccaf4-1c8b-4a98-a3a6-122629462baa","Type":"ContainerStarted","Data":"ac6ea55bcc3859ce35b4b2a848aa35b972cd8b1088cf08dc078958e44351649c"} Dec 05 11:25:55 crc kubenswrapper[4728]: I1205 11:25:55.193656 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-6767b55986-t74w7" Dec 05 11:25:55 crc kubenswrapper[4728]: I1205 11:25:55.244023 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-6767b55986-t74w7" podStartSLOduration=1.225154643 podStartE2EDuration="5.24398907s" podCreationTimestamp="2025-12-05 11:25:50 +0000 UTC" firstStartedPulling="2025-12-05 11:25:50.595458823 +0000 UTC m=+1084.737581536" lastFinishedPulling="2025-12-05 11:25:54.61429327 +0000 UTC m=+1088.756415963" observedRunningTime="2025-12-05 11:25:55.242757977 +0000 UTC m=+1089.384880690" watchObservedRunningTime="2025-12-05 11:25:55.24398907 +0000 UTC m=+1089.386111833" Dec 05 11:26:00 crc kubenswrapper[4728]: I1205 11:26:00.364055 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-6767b55986-t74w7" Dec 05 11:26:27 crc kubenswrapper[4728]: I1205 11:26:27.835769 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-btnkh"] Dec 05 11:26:27 crc kubenswrapper[4728]: I1205 11:26:27.837879 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-btnkh" Dec 05 11:26:27 crc kubenswrapper[4728]: I1205 11:26:27.840240 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-zqc5j"] Dec 05 11:26:27 crc kubenswrapper[4728]: I1205 11:26:27.841512 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-ggk9z" Dec 05 11:26:27 crc kubenswrapper[4728]: I1205 11:26:27.841667 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-zqc5j" Dec 05 11:26:27 crc kubenswrapper[4728]: I1205 11:26:27.844517 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-btnkh"] Dec 05 11:26:27 crc kubenswrapper[4728]: I1205 11:26:27.846335 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-gkfj2" Dec 05 11:26:27 crc kubenswrapper[4728]: I1205 11:26:27.857703 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-dnlfm"] Dec 05 11:26:27 crc kubenswrapper[4728]: I1205 11:26:27.859277 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-dnlfm" Dec 05 11:26:27 crc kubenswrapper[4728]: I1205 11:26:27.864319 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-7t5z2" Dec 05 11:26:27 crc kubenswrapper[4728]: I1205 11:26:27.868911 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-zqc5j"] Dec 05 11:26:27 crc kubenswrapper[4728]: I1205 11:26:27.891062 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-nch5j"] Dec 05 11:26:27 crc kubenswrapper[4728]: I1205 11:26:27.892313 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-nch5j" Dec 05 11:26:27 crc kubenswrapper[4728]: I1205 11:26:27.895840 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-g4fj2" Dec 05 11:26:27 crc kubenswrapper[4728]: I1205 11:26:27.904155 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-dnlfm"] Dec 05 11:26:27 crc kubenswrapper[4728]: I1205 11:26:27.916861 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-ghbcj"] Dec 05 11:26:27 crc kubenswrapper[4728]: I1205 11:26:27.918001 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-ghbcj" Dec 05 11:26:27 crc kubenswrapper[4728]: I1205 11:26:27.921343 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-nch5j"] Dec 05 11:26:27 crc kubenswrapper[4728]: I1205 11:26:27.923235 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-rmnp5" Dec 05 11:26:27 crc kubenswrapper[4728]: I1205 11:26:27.932311 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-ghbcj"] Dec 05 11:26:27 crc kubenswrapper[4728]: I1205 11:26:27.936950 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-2zvrw"] Dec 05 11:26:27 crc kubenswrapper[4728]: I1205 11:26:27.938354 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-2zvrw" Dec 05 11:26:27 crc kubenswrapper[4728]: I1205 11:26:27.941379 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-tdq4g" Dec 05 11:26:27 crc kubenswrapper[4728]: I1205 11:26:27.948567 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pvc9p\" (UniqueName: \"kubernetes.io/projected/941ddd04-049e-4247-98c2-6ef2117c2c69-kube-api-access-pvc9p\") pod \"cinder-operator-controller-manager-859b6ccc6-zqc5j\" (UID: \"941ddd04-049e-4247-98c2-6ef2117c2c69\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-zqc5j" Dec 05 11:26:27 crc kubenswrapper[4728]: I1205 11:26:27.948674 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nn7gx\" (UniqueName: \"kubernetes.io/projected/5d689dc0-c7c8-4af2-8f4c-45863ab88b69-kube-api-access-nn7gx\") pod \"barbican-operator-controller-manager-7d9dfd778-btnkh\" (UID: \"5d689dc0-c7c8-4af2-8f4c-45863ab88b69\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-btnkh" Dec 05 11:26:27 crc kubenswrapper[4728]: I1205 11:26:27.954465 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-2zvrw"] Dec 05 11:26:27 crc kubenswrapper[4728]: I1205 11:26:27.992362 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-c2s6r"] Dec 05 11:26:27 crc kubenswrapper[4728]: I1205 11:26:27.993713 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-c2s6r" Dec 05 11:26:27 crc kubenswrapper[4728]: I1205 11:26:27.999581 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Dec 05 11:26:27 crc kubenswrapper[4728]: I1205 11:26:27.999824 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-zxcv9" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.000336 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-7jg64"] Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.001785 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-7jg64" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.005060 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-c2s6r"] Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.007445 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-w4jrx" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.012746 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-6wcpq"] Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.016170 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-6wcpq" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.021890 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-7jg64"] Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.021920 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-wbv8z" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.027561 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-n8x6r"] Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.028911 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-n8x6r" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.034027 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-6wcpq"] Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.044507 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-zwsjq" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.051189 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-xjwhb"] Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.052339 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-xjwhb" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.052409 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tnf6v\" (UniqueName: \"kubernetes.io/projected/403718e0-87fa-402a-844e-6b458a15b003-kube-api-access-tnf6v\") pod \"heat-operator-controller-manager-5f64f6f8bb-ghbcj\" (UID: \"403718e0-87fa-402a-844e-6b458a15b003\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-ghbcj" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.052465 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tb6lr\" (UniqueName: \"kubernetes.io/projected/34f5a6c5-a316-450d-83a1-affbdd4d2e0e-kube-api-access-tb6lr\") pod \"horizon-operator-controller-manager-68c6d99b8f-2zvrw\" (UID: \"34f5a6c5-a316-450d-83a1-affbdd4d2e0e\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-2zvrw" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.052495 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kgrrf\" (UniqueName: \"kubernetes.io/projected/df0f8091-3107-4a49-9672-8332e4c1f8c0-kube-api-access-kgrrf\") pod \"designate-operator-controller-manager-78b4bc895b-dnlfm\" (UID: \"df0f8091-3107-4a49-9672-8332e4c1f8c0\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-dnlfm" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.052535 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pvc9p\" (UniqueName: \"kubernetes.io/projected/941ddd04-049e-4247-98c2-6ef2117c2c69-kube-api-access-pvc9p\") pod \"cinder-operator-controller-manager-859b6ccc6-zqc5j\" (UID: \"941ddd04-049e-4247-98c2-6ef2117c2c69\") " 
pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-zqc5j" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.052577 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d7h79\" (UniqueName: \"kubernetes.io/projected/3149306c-d64a-4bdf-994a-ecec0489e472-kube-api-access-d7h79\") pod \"glance-operator-controller-manager-77987cd8cd-nch5j\" (UID: \"3149306c-d64a-4bdf-994a-ecec0489e472\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-nch5j" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.052649 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nn7gx\" (UniqueName: \"kubernetes.io/projected/5d689dc0-c7c8-4af2-8f4c-45863ab88b69-kube-api-access-nn7gx\") pod \"barbican-operator-controller-manager-7d9dfd778-btnkh\" (UID: \"5d689dc0-c7c8-4af2-8f4c-45863ab88b69\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-btnkh" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.058635 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-xjwhb"] Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.061093 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-258d2" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.066846 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-n8x6r"] Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.078406 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-sdqtl"] Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.079838 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-sdqtl" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.082187 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-sjnpp" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.096496 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pvc9p\" (UniqueName: \"kubernetes.io/projected/941ddd04-049e-4247-98c2-6ef2117c2c69-kube-api-access-pvc9p\") pod \"cinder-operator-controller-manager-859b6ccc6-zqc5j\" (UID: \"941ddd04-049e-4247-98c2-6ef2117c2c69\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-zqc5j" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.107410 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-sdqtl"] Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.116883 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nn7gx\" (UniqueName: \"kubernetes.io/projected/5d689dc0-c7c8-4af2-8f4c-45863ab88b69-kube-api-access-nn7gx\") pod \"barbican-operator-controller-manager-7d9dfd778-btnkh\" (UID: \"5d689dc0-c7c8-4af2-8f4c-45863ab88b69\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-btnkh" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.119981 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-8fvcf"] Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.121009 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-8fvcf" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.134641 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-jdckx" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.135284 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-8fvcf"] Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.171763 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-btnkh" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.175554 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tnf6v\" (UniqueName: \"kubernetes.io/projected/403718e0-87fa-402a-844e-6b458a15b003-kube-api-access-tnf6v\") pod \"heat-operator-controller-manager-5f64f6f8bb-ghbcj\" (UID: \"403718e0-87fa-402a-844e-6b458a15b003\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-ghbcj" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.175594 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s9p6q\" (UniqueName: \"kubernetes.io/projected/03a7d3e9-4e85-496e-963f-f0c1e7e4cf04-kube-api-access-s9p6q\") pod \"ironic-operator-controller-manager-6c548fd776-7jg64\" (UID: \"03a7d3e9-4e85-496e-963f-f0c1e7e4cf04\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-7jg64" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.175629 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tb6lr\" (UniqueName: \"kubernetes.io/projected/34f5a6c5-a316-450d-83a1-affbdd4d2e0e-kube-api-access-tb6lr\") pod \"horizon-operator-controller-manager-68c6d99b8f-2zvrw\" (UID: \"34f5a6c5-a316-450d-83a1-affbdd4d2e0e\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-2zvrw" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.175657 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kgrrf\" (UniqueName: \"kubernetes.io/projected/df0f8091-3107-4a49-9672-8332e4c1f8c0-kube-api-access-kgrrf\") pod \"designate-operator-controller-manager-78b4bc895b-dnlfm\" (UID: \"df0f8091-3107-4a49-9672-8332e4c1f8c0\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-dnlfm" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.175692 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8b58p\" (UniqueName: \"kubernetes.io/projected/a1c012ce-e23c-4235-b2b2-56306e3d4722-kube-api-access-8b58p\") pod \"infra-operator-controller-manager-57548d458d-c2s6r\" (UID: \"a1c012ce-e23c-4235-b2b2-56306e3d4722\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-c2s6r" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.175714 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-979cn\" (UniqueName: \"kubernetes.io/projected/d21af02e-d731-402f-aa09-1f705dc4e82b-kube-api-access-979cn\") pod \"manila-operator-controller-manager-7c79b5df47-n8x6r\" (UID: \"d21af02e-d731-402f-aa09-1f705dc4e82b\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-n8x6r" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.175740 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d7h79\" (UniqueName: \"kubernetes.io/projected/3149306c-d64a-4bdf-994a-ecec0489e472-kube-api-access-d7h79\") pod \"glance-operator-controller-manager-77987cd8cd-nch5j\" (UID: \"3149306c-d64a-4bdf-994a-ecec0489e472\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-nch5j" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.175871 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-lpq69\" (UniqueName: \"kubernetes.io/projected/ce9ea80b-ff4e-49a8-a1b6-7f7ea21dbf10-kube-api-access-lpq69\") pod \"mariadb-operator-controller-manager-56bbcc9d85-xjwhb\" (UID: \"ce9ea80b-ff4e-49a8-a1b6-7f7ea21dbf10\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-xjwhb" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.175911 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a1c012ce-e23c-4235-b2b2-56306e3d4722-cert\") pod \"infra-operator-controller-manager-57548d458d-c2s6r\" (UID: \"a1c012ce-e23c-4235-b2b2-56306e3d4722\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-c2s6r" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.175946 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5gw8k\" (UniqueName: \"kubernetes.io/projected/6f5ec4c9-95e8-43ea-a137-9c781e4f234f-kube-api-access-5gw8k\") pod \"keystone-operator-controller-manager-7765d96ddf-6wcpq\" (UID: \"6f5ec4c9-95e8-43ea-a137-9c781e4f234f\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-6wcpq" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.182110 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-t7lpc"] Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.184040 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-zqc5j" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.203724 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-t7lpc" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.246925 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tnf6v\" (UniqueName: \"kubernetes.io/projected/403718e0-87fa-402a-844e-6b458a15b003-kube-api-access-tnf6v\") pod \"heat-operator-controller-manager-5f64f6f8bb-ghbcj\" (UID: \"403718e0-87fa-402a-844e-6b458a15b003\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-ghbcj" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.257593 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kgrrf\" (UniqueName: \"kubernetes.io/projected/df0f8091-3107-4a49-9672-8332e4c1f8c0-kube-api-access-kgrrf\") pod \"designate-operator-controller-manager-78b4bc895b-dnlfm\" (UID: \"df0f8091-3107-4a49-9672-8332e4c1f8c0\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-dnlfm" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.258058 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d7h79\" (UniqueName: \"kubernetes.io/projected/3149306c-d64a-4bdf-994a-ecec0489e472-kube-api-access-d7h79\") pod \"glance-operator-controller-manager-77987cd8cd-nch5j\" (UID: \"3149306c-d64a-4bdf-994a-ecec0489e472\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-nch5j" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.258117 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tb6lr\" (UniqueName: \"kubernetes.io/projected/34f5a6c5-a316-450d-83a1-affbdd4d2e0e-kube-api-access-tb6lr\") pod \"horizon-operator-controller-manager-68c6d99b8f-2zvrw\" (UID: 
\"34f5a6c5-a316-450d-83a1-affbdd4d2e0e\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-2zvrw" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.258447 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-ql9hh" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.279117 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a1c012ce-e23c-4235-b2b2-56306e3d4722-cert\") pod \"infra-operator-controller-manager-57548d458d-c2s6r\" (UID: \"a1c012ce-e23c-4235-b2b2-56306e3d4722\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-c2s6r" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.279177 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5gw8k\" (UniqueName: \"kubernetes.io/projected/6f5ec4c9-95e8-43ea-a137-9c781e4f234f-kube-api-access-5gw8k\") pod \"keystone-operator-controller-manager-7765d96ddf-6wcpq\" (UID: \"6f5ec4c9-95e8-43ea-a137-9c781e4f234f\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-6wcpq" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.279199 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-2zvrw" Dec 05 11:26:28 crc kubenswrapper[4728]: E1205 11:26:28.279337 4728 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 05 11:26:28 crc kubenswrapper[4728]: E1205 11:26:28.279404 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a1c012ce-e23c-4235-b2b2-56306e3d4722-cert podName:a1c012ce-e23c-4235-b2b2-56306e3d4722 nodeName:}" failed. No retries permitted until 2025-12-05 11:26:28.779383076 +0000 UTC m=+1122.921505769 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/a1c012ce-e23c-4235-b2b2-56306e3d4722-cert") pod "infra-operator-controller-manager-57548d458d-c2s6r" (UID: "a1c012ce-e23c-4235-b2b2-56306e3d4722") : secret "infra-operator-webhook-server-cert" not found Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.279210 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2sbk8\" (UniqueName: \"kubernetes.io/projected/98cd0df5-f4a5-4515-80b5-d0ac625a527a-kube-api-access-2sbk8\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-sdqtl\" (UID: \"98cd0df5-f4a5-4515-80b5-d0ac625a527a\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-sdqtl" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.279595 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dnsrq\" (UniqueName: \"kubernetes.io/projected/45a6cdaf-b12d-4f86-b6ce-7761cfd5aee6-kube-api-access-dnsrq\") pod \"nova-operator-controller-manager-697bc559fc-8fvcf\" (UID: \"45a6cdaf-b12d-4f86-b6ce-7761cfd5aee6\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-8fvcf" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.279618 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s9p6q\" (UniqueName: \"kubernetes.io/projected/03a7d3e9-4e85-496e-963f-f0c1e7e4cf04-kube-api-access-s9p6q\") pod \"ironic-operator-controller-manager-6c548fd776-7jg64\" (UID: \"03a7d3e9-4e85-496e-963f-f0c1e7e4cf04\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-7jg64" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.279646 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8b58p\" (UniqueName: \"kubernetes.io/projected/a1c012ce-e23c-4235-b2b2-56306e3d4722-kube-api-access-8b58p\") pod \"infra-operator-controller-manager-57548d458d-c2s6r\" (UID: \"a1c012ce-e23c-4235-b2b2-56306e3d4722\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-c2s6r" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.279667 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-979cn\" (UniqueName: \"kubernetes.io/projected/d21af02e-d731-402f-aa09-1f705dc4e82b-kube-api-access-979cn\") pod \"manila-operator-controller-manager-7c79b5df47-n8x6r\" (UID: \"d21af02e-d731-402f-aa09-1f705dc4e82b\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-n8x6r" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.279692 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7vn56\" (UniqueName: \"kubernetes.io/projected/854e2a55-450f-48e7-93fb-fca327f4fd18-kube-api-access-7vn56\") pod \"octavia-operator-controller-manager-998648c74-t7lpc\" (UID: \"854e2a55-450f-48e7-93fb-fca327f4fd18\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-t7lpc" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.279728 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lpq69\" (UniqueName: \"kubernetes.io/projected/ce9ea80b-ff4e-49a8-a1b6-7f7ea21dbf10-kube-api-access-lpq69\") pod \"mariadb-operator-controller-manager-56bbcc9d85-xjwhb\" (UID: \"ce9ea80b-ff4e-49a8-a1b6-7f7ea21dbf10\") " 
pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-xjwhb" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.287385 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-t7lpc"] Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.306530 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lpq69\" (UniqueName: \"kubernetes.io/projected/ce9ea80b-ff4e-49a8-a1b6-7f7ea21dbf10-kube-api-access-lpq69\") pod \"mariadb-operator-controller-manager-56bbcc9d85-xjwhb\" (UID: \"ce9ea80b-ff4e-49a8-a1b6-7f7ea21dbf10\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-xjwhb" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.306878 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4wlcvm"] Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.307954 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5gw8k\" (UniqueName: \"kubernetes.io/projected/6f5ec4c9-95e8-43ea-a137-9c781e4f234f-kube-api-access-5gw8k\") pod \"keystone-operator-controller-manager-7765d96ddf-6wcpq\" (UID: \"6f5ec4c9-95e8-43ea-a137-9c781e4f234f\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-6wcpq" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.308549 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4wlcvm" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.312632 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-sgv5t" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.313201 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s9p6q\" (UniqueName: \"kubernetes.io/projected/03a7d3e9-4e85-496e-963f-f0c1e7e4cf04-kube-api-access-s9p6q\") pod \"ironic-operator-controller-manager-6c548fd776-7jg64\" (UID: \"03a7d3e9-4e85-496e-963f-f0c1e7e4cf04\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-7jg64" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.317613 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-979cn\" (UniqueName: \"kubernetes.io/projected/d21af02e-d731-402f-aa09-1f705dc4e82b-kube-api-access-979cn\") pod \"manila-operator-controller-manager-7c79b5df47-n8x6r\" (UID: \"d21af02e-d731-402f-aa09-1f705dc4e82b\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-n8x6r" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.317725 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.330919 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-pqmth"] Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.332491 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8b58p\" (UniqueName: \"kubernetes.io/projected/a1c012ce-e23c-4235-b2b2-56306e3d4722-kube-api-access-8b58p\") pod \"infra-operator-controller-manager-57548d458d-c2s6r\" (UID: \"a1c012ce-e23c-4235-b2b2-56306e3d4722\") " 
pod="openstack-operators/infra-operator-controller-manager-57548d458d-c2s6r" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.332732 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-pqmth" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.353753 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-7jg64" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.354820 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-xnq92"] Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.357744 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-vhxmm" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.359377 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-xnq92" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.362871 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-bzlhp" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.363148 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-6wcpq" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.388593 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7vn56\" (UniqueName: \"kubernetes.io/projected/854e2a55-450f-48e7-93fb-fca327f4fd18-kube-api-access-7vn56\") pod \"octavia-operator-controller-manager-998648c74-t7lpc\" (UID: \"854e2a55-450f-48e7-93fb-fca327f4fd18\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-t7lpc" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.388703 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9ljzk\" (UniqueName: \"kubernetes.io/projected/dd2fed26-0e5c-49e0-ad15-3936a13680e7-kube-api-access-9ljzk\") pod \"placement-operator-controller-manager-78f8948974-xnq92\" (UID: \"dd2fed26-0e5c-49e0-ad15-3936a13680e7\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-xnq92" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.388766 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6ee43140-9d2f-42c8-917f-eaa028a8e1b1-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4wlcvm\" (UID: \"6ee43140-9d2f-42c8-917f-eaa028a8e1b1\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4wlcvm" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.388829 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2sbk8\" (UniqueName: \"kubernetes.io/projected/98cd0df5-f4a5-4515-80b5-d0ac625a527a-kube-api-access-2sbk8\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-sdqtl\" (UID: \"98cd0df5-f4a5-4515-80b5-d0ac625a527a\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-sdqtl" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.388864 4728 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"kube-api-access-dnsrq\" (UniqueName: \"kubernetes.io/projected/45a6cdaf-b12d-4f86-b6ce-7761cfd5aee6-kube-api-access-dnsrq\") pod \"nova-operator-controller-manager-697bc559fc-8fvcf\" (UID: \"45a6cdaf-b12d-4f86-b6ce-7761cfd5aee6\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-8fvcf" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.388949 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-52flk\" (UniqueName: \"kubernetes.io/projected/b5925d20-e10a-4564-91f4-67acb55b2a01-kube-api-access-52flk\") pod \"ovn-operator-controller-manager-b6456fdb6-pqmth\" (UID: \"b5925d20-e10a-4564-91f4-67acb55b2a01\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-pqmth" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.388984 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xnm2k\" (UniqueName: \"kubernetes.io/projected/6ee43140-9d2f-42c8-917f-eaa028a8e1b1-kube-api-access-xnm2k\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4wlcvm\" (UID: \"6ee43140-9d2f-42c8-917f-eaa028a8e1b1\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4wlcvm" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.397173 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-n8x6r" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.400131 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4wlcvm"] Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.400159 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-xnq92"] Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.411262 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-h6czw"] Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.420396 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2sbk8\" (UniqueName: \"kubernetes.io/projected/98cd0df5-f4a5-4515-80b5-d0ac625a527a-kube-api-access-2sbk8\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-sdqtl\" (UID: \"98cd0df5-f4a5-4515-80b5-d0ac625a527a\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-sdqtl" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.424262 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-h6czw" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.426721 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-lnmwt" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.430377 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7vn56\" (UniqueName: \"kubernetes.io/projected/854e2a55-450f-48e7-93fb-fca327f4fd18-kube-api-access-7vn56\") pod \"octavia-operator-controller-manager-998648c74-t7lpc\" (UID: \"854e2a55-450f-48e7-93fb-fca327f4fd18\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-t7lpc" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.444376 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dnsrq\" (UniqueName: \"kubernetes.io/projected/45a6cdaf-b12d-4f86-b6ce-7761cfd5aee6-kube-api-access-dnsrq\") pod \"nova-operator-controller-manager-697bc559fc-8fvcf\" (UID: \"45a6cdaf-b12d-4f86-b6ce-7761cfd5aee6\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-8fvcf" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.474291 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-pqmth"] Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.481409 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-h6czw"] Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.492121 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6ee43140-9d2f-42c8-917f-eaa028a8e1b1-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4wlcvm\" (UID: \"6ee43140-9d2f-42c8-917f-eaa028a8e1b1\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4wlcvm" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.492257 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sxmt2\" (UniqueName: \"kubernetes.io/projected/db8744a4-edde-4a54-85e9-05089f650ba0-kube-api-access-sxmt2\") pod \"swift-operator-controller-manager-5f8c65bbfc-h6czw\" (UID: \"db8744a4-edde-4a54-85e9-05089f650ba0\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-h6czw" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.492302 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-52flk\" (UniqueName: \"kubernetes.io/projected/b5925d20-e10a-4564-91f4-67acb55b2a01-kube-api-access-52flk\") pod \"ovn-operator-controller-manager-b6456fdb6-pqmth\" (UID: \"b5925d20-e10a-4564-91f4-67acb55b2a01\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-pqmth" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.492326 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xnm2k\" (UniqueName: \"kubernetes.io/projected/6ee43140-9d2f-42c8-917f-eaa028a8e1b1-kube-api-access-xnm2k\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4wlcvm\" (UID: \"6ee43140-9d2f-42c8-917f-eaa028a8e1b1\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4wlcvm" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.492412 4728 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-9ljzk\" (UniqueName: \"kubernetes.io/projected/dd2fed26-0e5c-49e0-ad15-3936a13680e7-kube-api-access-9ljzk\") pod \"placement-operator-controller-manager-78f8948974-xnq92\" (UID: \"dd2fed26-0e5c-49e0-ad15-3936a13680e7\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-xnq92" Dec 05 11:26:28 crc kubenswrapper[4728]: E1205 11:26:28.492412 4728 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 11:26:28 crc kubenswrapper[4728]: E1205 11:26:28.492487 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6ee43140-9d2f-42c8-917f-eaa028a8e1b1-cert podName:6ee43140-9d2f-42c8-917f-eaa028a8e1b1 nodeName:}" failed. No retries permitted until 2025-12-05 11:26:28.99246624 +0000 UTC m=+1123.134588933 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/6ee43140-9d2f-42c8-917f-eaa028a8e1b1-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4wlcvm" (UID: "6ee43140-9d2f-42c8-917f-eaa028a8e1b1") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.498184 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-gbdj9"] Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.499433 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-gbdj9" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.500162 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-gbdj9"] Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.500642 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-dnlfm" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.503217 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-sjcl2" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.513151 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xnm2k\" (UniqueName: \"kubernetes.io/projected/6ee43140-9d2f-42c8-917f-eaa028a8e1b1-kube-api-access-xnm2k\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4wlcvm\" (UID: \"6ee43140-9d2f-42c8-917f-eaa028a8e1b1\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4wlcvm" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.514773 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-52flk\" (UniqueName: \"kubernetes.io/projected/b5925d20-e10a-4564-91f4-67acb55b2a01-kube-api-access-52flk\") pod \"ovn-operator-controller-manager-b6456fdb6-pqmth\" (UID: \"b5925d20-e10a-4564-91f4-67acb55b2a01\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-pqmth" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.516163 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-sq989"] Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.517411 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-sq989" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.519847 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-nch5j" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.521258 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-njr7s" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.523029 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9ljzk\" (UniqueName: \"kubernetes.io/projected/dd2fed26-0e5c-49e0-ad15-3936a13680e7-kube-api-access-9ljzk\") pod \"placement-operator-controller-manager-78f8948974-xnq92\" (UID: \"dd2fed26-0e5c-49e0-ad15-3936a13680e7\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-xnq92" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.542170 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-xjwhb" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.542662 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-sq989"] Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.542761 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-ghbcj" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.566964 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-tc4ks"] Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.568232 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-tc4ks" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.572240 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-bwggm" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.591189 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-tc4ks"] Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.597153 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-sdqtl" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.597506 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-26wwb\" (UniqueName: \"kubernetes.io/projected/c16435ec-544a-4d19-8667-925c045ecf61-kube-api-access-26wwb\") pod \"telemetry-operator-controller-manager-76cc84c6bb-gbdj9\" (UID: \"c16435ec-544a-4d19-8667-925c045ecf61\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-gbdj9" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.597543 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sxmt2\" (UniqueName: \"kubernetes.io/projected/db8744a4-edde-4a54-85e9-05089f650ba0-kube-api-access-sxmt2\") pod \"swift-operator-controller-manager-5f8c65bbfc-h6czw\" (UID: \"db8744a4-edde-4a54-85e9-05089f650ba0\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-h6czw" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.597618 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fbx5l\" (UniqueName: \"kubernetes.io/projected/5d7b3b1e-b8a3-4477-8d37-cfd6db1cf27d-kube-api-access-fbx5l\") pod \"test-operator-controller-manager-5854674fcc-sq989\" (UID: \"5d7b3b1e-b8a3-4477-8d37-cfd6db1cf27d\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-sq989" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.597638 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jbf7n\" (UniqueName: \"kubernetes.io/projected/a5b101e4-a4f7-4c73-8327-e09cce07eb51-kube-api-access-jbf7n\") pod \"watcher-operator-controller-manager-769dc69bc-tc4ks\" (UID: \"a5b101e4-a4f7-4c73-8327-e09cce07eb51\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-tc4ks" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.618285 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-8fvcf" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.621839 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sxmt2\" (UniqueName: \"kubernetes.io/projected/db8744a4-edde-4a54-85e9-05089f650ba0-kube-api-access-sxmt2\") pod \"swift-operator-controller-manager-5f8c65bbfc-h6czw\" (UID: \"db8744a4-edde-4a54-85e9-05089f650ba0\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-h6czw" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.623163 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-64b69b8785-cvs4m"] Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.625097 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-64b69b8785-cvs4m" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.628927 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-64b69b8785-cvs4m"] Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.642659 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-zv2l7" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.642944 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.643050 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.663554 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-sb7lq"] Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.689648 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-t7lpc" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.695850 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-sb7lq"
Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.699501 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-26wwb\" (UniqueName: \"kubernetes.io/projected/c16435ec-544a-4d19-8667-925c045ecf61-kube-api-access-26wwb\") pod \"telemetry-operator-controller-manager-76cc84c6bb-gbdj9\" (UID: \"c16435ec-544a-4d19-8667-925c045ecf61\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-gbdj9"
Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.699602 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fbx5l\" (UniqueName: \"kubernetes.io/projected/5d7b3b1e-b8a3-4477-8d37-cfd6db1cf27d-kube-api-access-fbx5l\") pod \"test-operator-controller-manager-5854674fcc-sq989\" (UID: \"5d7b3b1e-b8a3-4477-8d37-cfd6db1cf27d\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-sq989"
Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.699630 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jbf7n\" (UniqueName: \"kubernetes.io/projected/a5b101e4-a4f7-4c73-8327-e09cce07eb51-kube-api-access-jbf7n\") pod \"watcher-operator-controller-manager-769dc69bc-tc4ks\" (UID: \"a5b101e4-a4f7-4c73-8327-e09cce07eb51\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-tc4ks"
Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.700185 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-sb7lq"]
Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.705668 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-5llhb"
Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.727175 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jbf7n\" (UniqueName: \"kubernetes.io/projected/a5b101e4-a4f7-4c73-8327-e09cce07eb51-kube-api-access-jbf7n\") pod \"watcher-operator-controller-manager-769dc69bc-tc4ks\" (UID: \"a5b101e4-a4f7-4c73-8327-e09cce07eb51\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-tc4ks"
Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.735994 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fbx5l\" (UniqueName: \"kubernetes.io/projected/5d7b3b1e-b8a3-4477-8d37-cfd6db1cf27d-kube-api-access-fbx5l\") pod \"test-operator-controller-manager-5854674fcc-sq989\" (UID: \"5d7b3b1e-b8a3-4477-8d37-cfd6db1cf27d\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-sq989"
Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.739101 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-26wwb\" (UniqueName: \"kubernetes.io/projected/c16435ec-544a-4d19-8667-925c045ecf61-kube-api-access-26wwb\") pod \"telemetry-operator-controller-manager-76cc84c6bb-gbdj9\" (UID: \"c16435ec-544a-4d19-8667-925c045ecf61\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-gbdj9"
Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.744360 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-pqmth"
Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.776186 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-xnq92"
Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.791597 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-h6czw"
Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.795985 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-btnkh"]
Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.801583 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/891e8e93-da9a-4b87-8e69-04fe149274cd-webhook-certs\") pod \"openstack-operator-controller-manager-64b69b8785-cvs4m\" (UID: \"891e8e93-da9a-4b87-8e69-04fe149274cd\") " pod="openstack-operators/openstack-operator-controller-manager-64b69b8785-cvs4m"
Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.801667 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j759x\" (UniqueName: \"kubernetes.io/projected/891e8e93-da9a-4b87-8e69-04fe149274cd-kube-api-access-j759x\") pod \"openstack-operator-controller-manager-64b69b8785-cvs4m\" (UID: \"891e8e93-da9a-4b87-8e69-04fe149274cd\") " pod="openstack-operators/openstack-operator-controller-manager-64b69b8785-cvs4m"
Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.801744 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a1c012ce-e23c-4235-b2b2-56306e3d4722-cert\") pod \"infra-operator-controller-manager-57548d458d-c2s6r\" (UID: \"a1c012ce-e23c-4235-b2b2-56306e3d4722\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-c2s6r"
Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.801764 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/891e8e93-da9a-4b87-8e69-04fe149274cd-metrics-certs\") pod \"openstack-operator-controller-manager-64b69b8785-cvs4m\" (UID: \"891e8e93-da9a-4b87-8e69-04fe149274cd\") " pod="openstack-operators/openstack-operator-controller-manager-64b69b8785-cvs4m"
Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.801870 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-psf48\" (UniqueName: \"kubernetes.io/projected/04a349f4-b388-4a9c-8dbc-54bd1fb46934-kube-api-access-psf48\") pod \"rabbitmq-cluster-operator-manager-668c99d594-sb7lq\" (UID: \"04a349f4-b388-4a9c-8dbc-54bd1fb46934\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-sb7lq"
Dec 05 11:26:28 crc kubenswrapper[4728]: E1205 11:26:28.801876 4728 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found
Dec 05 11:26:28 crc kubenswrapper[4728]: E1205 11:26:28.801937 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a1c012ce-e23c-4235-b2b2-56306e3d4722-cert podName:a1c012ce-e23c-4235-b2b2-56306e3d4722 nodeName:}" failed. No retries permitted until 2025-12-05 11:26:29.801917934 +0000 UTC m=+1123.944040627 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/a1c012ce-e23c-4235-b2b2-56306e3d4722-cert") pod "infra-operator-controller-manager-57548d458d-c2s6r" (UID: "a1c012ce-e23c-4235-b2b2-56306e3d4722") : secret "infra-operator-webhook-server-cert" not found
Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.831737 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-zqc5j"]
Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.834585 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-gbdj9"
Dec 05 11:26:28 crc kubenswrapper[4728]: W1205 11:26:28.839848 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5d689dc0_c7c8_4af2_8f4c_45863ab88b69.slice/crio-bd1f80e59f76ab8f54497f19a240b5a1aa736e1a36ed2fe50087d76b787f5422 WatchSource:0}: Error finding container bd1f80e59f76ab8f54497f19a240b5a1aa736e1a36ed2fe50087d76b787f5422: Status 404 returned error can't find the container with id bd1f80e59f76ab8f54497f19a240b5a1aa736e1a36ed2fe50087d76b787f5422
Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.855470 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-sq989"
Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.903231 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-tc4ks"
Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.904265 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/891e8e93-da9a-4b87-8e69-04fe149274cd-metrics-certs\") pod \"openstack-operator-controller-manager-64b69b8785-cvs4m\" (UID: \"891e8e93-da9a-4b87-8e69-04fe149274cd\") " pod="openstack-operators/openstack-operator-controller-manager-64b69b8785-cvs4m"
Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.904324 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-psf48\" (UniqueName: \"kubernetes.io/projected/04a349f4-b388-4a9c-8dbc-54bd1fb46934-kube-api-access-psf48\") pod \"rabbitmq-cluster-operator-manager-668c99d594-sb7lq\" (UID: \"04a349f4-b388-4a9c-8dbc-54bd1fb46934\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-sb7lq"
Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.904353 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/891e8e93-da9a-4b87-8e69-04fe149274cd-webhook-certs\") pod \"openstack-operator-controller-manager-64b69b8785-cvs4m\" (UID: \"891e8e93-da9a-4b87-8e69-04fe149274cd\") " pod="openstack-operators/openstack-operator-controller-manager-64b69b8785-cvs4m"
Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.904392 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j759x\" (UniqueName: \"kubernetes.io/projected/891e8e93-da9a-4b87-8e69-04fe149274cd-kube-api-access-j759x\") pod \"openstack-operator-controller-manager-64b69b8785-cvs4m\" (UID: \"891e8e93-da9a-4b87-8e69-04fe149274cd\") " pod="openstack-operators/openstack-operator-controller-manager-64b69b8785-cvs4m"
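The "secret ... not found" failures above are kubelet-side symptoms only: the secret volume plugin cannot materialize a Secret volume until the Secret object exists in the API, and at this point the webhook/metrics certificate Secrets referenced by the operator pods have not been published yet (on deployments like this they are typically created asynchronously by cert-manager or the operator bundle; that is an assumption, not something the log states). A minimal client-go sketch, assuming a reachable kubeconfig at the default location, that checks whether the Secrets named in the mount errors exist:

    package main

    import (
    	"context"
    	"fmt"

    	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
    	"k8s.io/client-go/kubernetes"
    	"k8s.io/client-go/tools/clientcmd"
    )

    func main() {
    	// Assumes ~/.kube/config points at the cluster that produced this log.
    	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
    	if err != nil {
    		panic(err)
    	}
    	cs := kubernetes.NewForConfigOrDie(cfg)
    	// Secret names taken verbatim from the mount errors in this log.
    	for _, name := range []string{
    		"infra-operator-webhook-server-cert",
    		"metrics-server-cert",
    		"webhook-server-cert",
    		"openstack-baremetal-operator-webhook-server-cert",
    	} {
    		_, err := cs.CoreV1().Secrets("openstack-operators").Get(context.TODO(), name, metav1.GetOptions{})
    		fmt.Printf("%-50s %v\n", name, err)
    	}
    }

Once the Secrets appear, the pending MountVolume operations below succeed on their next retry with no further action needed on the node.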
pod="openstack-operators/openstack-operator-controller-manager-64b69b8785-cvs4m" Dec 05 11:26:28 crc kubenswrapper[4728]: E1205 11:26:28.904754 4728 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 05 11:26:28 crc kubenswrapper[4728]: E1205 11:26:28.904813 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/891e8e93-da9a-4b87-8e69-04fe149274cd-metrics-certs podName:891e8e93-da9a-4b87-8e69-04fe149274cd nodeName:}" failed. No retries permitted until 2025-12-05 11:26:29.404782117 +0000 UTC m=+1123.546904810 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/891e8e93-da9a-4b87-8e69-04fe149274cd-metrics-certs") pod "openstack-operator-controller-manager-64b69b8785-cvs4m" (UID: "891e8e93-da9a-4b87-8e69-04fe149274cd") : secret "metrics-server-cert" not found Dec 05 11:26:28 crc kubenswrapper[4728]: E1205 11:26:28.904969 4728 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 05 11:26:28 crc kubenswrapper[4728]: E1205 11:26:28.904990 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/891e8e93-da9a-4b87-8e69-04fe149274cd-webhook-certs podName:891e8e93-da9a-4b87-8e69-04fe149274cd nodeName:}" failed. No retries permitted until 2025-12-05 11:26:29.404983952 +0000 UTC m=+1123.547106645 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/891e8e93-da9a-4b87-8e69-04fe149274cd-webhook-certs") pod "openstack-operator-controller-manager-64b69b8785-cvs4m" (UID: "891e8e93-da9a-4b87-8e69-04fe149274cd") : secret "webhook-server-cert" not found Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.936315 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j759x\" (UniqueName: \"kubernetes.io/projected/891e8e93-da9a-4b87-8e69-04fe149274cd-kube-api-access-j759x\") pod \"openstack-operator-controller-manager-64b69b8785-cvs4m\" (UID: \"891e8e93-da9a-4b87-8e69-04fe149274cd\") " pod="openstack-operators/openstack-operator-controller-manager-64b69b8785-cvs4m" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.938590 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-psf48\" (UniqueName: \"kubernetes.io/projected/04a349f4-b388-4a9c-8dbc-54bd1fb46934-kube-api-access-psf48\") pod \"rabbitmq-cluster-operator-manager-668c99d594-sb7lq\" (UID: \"04a349f4-b388-4a9c-8dbc-54bd1fb46934\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-sb7lq" Dec 05 11:26:28 crc kubenswrapper[4728]: I1205 11:26:28.992299 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-2zvrw"] Dec 05 11:26:29 crc kubenswrapper[4728]: I1205 11:26:29.005524 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6ee43140-9d2f-42c8-917f-eaa028a8e1b1-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4wlcvm\" (UID: \"6ee43140-9d2f-42c8-917f-eaa028a8e1b1\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4wlcvm" Dec 05 11:26:29 crc kubenswrapper[4728]: W1205 11:26:29.005629 4728 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod34f5a6c5_a316_450d_83a1_affbdd4d2e0e.slice/crio-fe0ce095956d01f54e9d2dcad94e097104d09f1c4c487804f71ae36dd0c06d84 WatchSource:0}: Error finding container fe0ce095956d01f54e9d2dcad94e097104d09f1c4c487804f71ae36dd0c06d84: Status 404 returned error can't find the container with id fe0ce095956d01f54e9d2dcad94e097104d09f1c4c487804f71ae36dd0c06d84 Dec 05 11:26:29 crc kubenswrapper[4728]: E1205 11:26:29.005706 4728 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 11:26:29 crc kubenswrapper[4728]: E1205 11:26:29.005762 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6ee43140-9d2f-42c8-917f-eaa028a8e1b1-cert podName:6ee43140-9d2f-42c8-917f-eaa028a8e1b1 nodeName:}" failed. No retries permitted until 2025-12-05 11:26:30.005746619 +0000 UTC m=+1124.147869312 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/6ee43140-9d2f-42c8-917f-eaa028a8e1b1-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4wlcvm" (UID: "6ee43140-9d2f-42c8-917f-eaa028a8e1b1") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 11:26:29 crc kubenswrapper[4728]: I1205 11:26:29.040696 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-sb7lq" Dec 05 11:26:29 crc kubenswrapper[4728]: I1205 11:26:29.051549 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-7jg64"] Dec 05 11:26:29 crc kubenswrapper[4728]: I1205 11:26:29.127293 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-6wcpq"] Dec 05 11:26:29 crc kubenswrapper[4728]: W1205 11:26:29.143089 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod03a7d3e9_4e85_496e_963f_f0c1e7e4cf04.slice/crio-7fa62d402c18346164b40b3fe9b1e7d62345837fc453043e3628fde560f9ecfa WatchSource:0}: Error finding container 7fa62d402c18346164b40b3fe9b1e7d62345837fc453043e3628fde560f9ecfa: Status 404 returned error can't find the container with id 7fa62d402c18346164b40b3fe9b1e7d62345837fc453043e3628fde560f9ecfa Dec 05 11:26:29 crc kubenswrapper[4728]: I1205 11:26:29.242431 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-nch5j"] Dec 05 11:26:29 crc kubenswrapper[4728]: I1205 11:26:29.252398 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-n8x6r"] Dec 05 11:26:29 crc kubenswrapper[4728]: I1205 11:26:29.272166 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-ghbcj"] Dec 05 11:26:29 crc kubenswrapper[4728]: I1205 11:26:29.413750 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/891e8e93-da9a-4b87-8e69-04fe149274cd-metrics-certs\") pod \"openstack-operator-controller-manager-64b69b8785-cvs4m\" (UID: \"891e8e93-da9a-4b87-8e69-04fe149274cd\") " pod="openstack-operators/openstack-operator-controller-manager-64b69b8785-cvs4m" Dec 05 11:26:29 crc 
kubenswrapper[4728]: I1205 11:26:29.413852 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/891e8e93-da9a-4b87-8e69-04fe149274cd-webhook-certs\") pod \"openstack-operator-controller-manager-64b69b8785-cvs4m\" (UID: \"891e8e93-da9a-4b87-8e69-04fe149274cd\") " pod="openstack-operators/openstack-operator-controller-manager-64b69b8785-cvs4m" Dec 05 11:26:29 crc kubenswrapper[4728]: E1205 11:26:29.413967 4728 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 05 11:26:29 crc kubenswrapper[4728]: E1205 11:26:29.414012 4728 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 05 11:26:29 crc kubenswrapper[4728]: E1205 11:26:29.414055 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/891e8e93-da9a-4b87-8e69-04fe149274cd-metrics-certs podName:891e8e93-da9a-4b87-8e69-04fe149274cd nodeName:}" failed. No retries permitted until 2025-12-05 11:26:30.414030566 +0000 UTC m=+1124.556153259 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/891e8e93-da9a-4b87-8e69-04fe149274cd-metrics-certs") pod "openstack-operator-controller-manager-64b69b8785-cvs4m" (UID: "891e8e93-da9a-4b87-8e69-04fe149274cd") : secret "metrics-server-cert" not found Dec 05 11:26:29 crc kubenswrapper[4728]: E1205 11:26:29.414074 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/891e8e93-da9a-4b87-8e69-04fe149274cd-webhook-certs podName:891e8e93-da9a-4b87-8e69-04fe149274cd nodeName:}" failed. No retries permitted until 2025-12-05 11:26:30.414067597 +0000 UTC m=+1124.556190290 (durationBeforeRetry 1s). 
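Note the durationBeforeRetry values across these nestedpendingoperations entries: 500ms on the first failure, 1s on the next, and 2s and 4s further down. Kubelet's volume operation executor doubles the wait after each failed MountVolume attempt per volume. A sketch of that schedule (the 2m2s cap is an assumption from kubelet defaults, not something this log shows):

    package main

    import (
    	"fmt"
    	"time"
    )

    func main() {
    	// Doubling retry delay as seen in the log: 500ms, 1s, 2s, 4s, ...
    	const maxDelay = 2*time.Minute + 2*time.Second // assumed cap
    	d := 500 * time.Millisecond
    	for attempt := 1; attempt <= 9; attempt++ {
    		fmt.Printf("attempt %d: durationBeforeRetry %v\n", attempt, d)
    		d *= 2
    		if d > maxDelay {
    			d = maxDelay
    		}
    	}
    }

The doubling resets once a mount succeeds, so these waits disappear as soon as the missing Secrets are created.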
Dec 05 11:26:29 crc kubenswrapper[4728]: I1205 11:26:29.455653 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-xjwhb"]
Dec 05 11:26:29 crc kubenswrapper[4728]: I1205 11:26:29.467457 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-n8x6r" event={"ID":"d21af02e-d731-402f-aa09-1f705dc4e82b","Type":"ContainerStarted","Data":"90b49cea6b79a1389f8e262e3f56b71effc4dc59077bfa76df501456a95e6771"}
Dec 05 11:26:29 crc kubenswrapper[4728]: I1205 11:26:29.470846 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-nch5j" event={"ID":"3149306c-d64a-4bdf-994a-ecec0489e472","Type":"ContainerStarted","Data":"45eca6d4a61bf507e628e0e1f414e85c2d96a3b3092e9d2e29373824579d4906"}
Dec 05 11:26:29 crc kubenswrapper[4728]: I1205 11:26:29.478329 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-6wcpq" event={"ID":"6f5ec4c9-95e8-43ea-a137-9c781e4f234f","Type":"ContainerStarted","Data":"9dd09ea79ee3d8b5e0cc37f29921ff22cf4b31d7a6a8d9782d5b79e1005202e5"}
Dec 05 11:26:29 crc kubenswrapper[4728]: I1205 11:26:29.481434 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-sdqtl"]
Dec 05 11:26:29 crc kubenswrapper[4728]: I1205 11:26:29.482069 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-2zvrw" event={"ID":"34f5a6c5-a316-450d-83a1-affbdd4d2e0e","Type":"ContainerStarted","Data":"fe0ce095956d01f54e9d2dcad94e097104d09f1c4c487804f71ae36dd0c06d84"}
Dec 05 11:26:29 crc kubenswrapper[4728]: I1205 11:26:29.483544 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-btnkh" event={"ID":"5d689dc0-c7c8-4af2-8f4c-45863ab88b69","Type":"ContainerStarted","Data":"bd1f80e59f76ab8f54497f19a240b5a1aa736e1a36ed2fe50087d76b787f5422"}
Dec 05 11:26:29 crc kubenswrapper[4728]: I1205 11:26:29.484621 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-ghbcj" event={"ID":"403718e0-87fa-402a-844e-6b458a15b003","Type":"ContainerStarted","Data":"1bf8fe7b5232208b98d413de5a7bc1f0bc848b97b456d12aa24cd19a0416a5f9"}
Dec 05 11:26:29 crc kubenswrapper[4728]: I1205 11:26:29.485964 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-zqc5j" event={"ID":"941ddd04-049e-4247-98c2-6ef2117c2c69","Type":"ContainerStarted","Data":"68206e30184d47290a8a5511bfbb48520cfe9857482a7f393bab7fa964524d89"}
Dec 05 11:26:29 crc kubenswrapper[4728]: I1205 11:26:29.487575 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-7jg64" event={"ID":"03a7d3e9-4e85-496e-963f-f0c1e7e4cf04","Type":"ContainerStarted","Data":"7fa62d402c18346164b40b3fe9b1e7d62345837fc453043e3628fde560f9ecfa"}
Dec 05 11:26:29 crc kubenswrapper[4728]: W1205 11:26:29.488003 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod98cd0df5_f4a5_4515_80b5_d0ac625a527a.slice/crio-6cc950fba6c0201acb69fd5967525a340324685d02594ba2464268c004c9f263 WatchSource:0}: Error finding container 6cc950fba6c0201acb69fd5967525a340324685d02594ba2464268c004c9f263: Status 404 returned error can't find the container with id 6cc950fba6c0201acb69fd5967525a340324685d02594ba2464268c004c9f263
Dec 05 11:26:29 crc kubenswrapper[4728]: I1205 11:26:29.570778 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-t7lpc"]
Dec 05 11:26:29 crc kubenswrapper[4728]: I1205 11:26:29.580658 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-h6czw"]
Dec 05 11:26:29 crc kubenswrapper[4728]: I1205 11:26:29.586875 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-gbdj9"]
Dec 05 11:26:29 crc kubenswrapper[4728]: I1205 11:26:29.600135 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-dnlfm"]
Dec 05 11:26:29 crc kubenswrapper[4728]: I1205 11:26:29.607442 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-8fvcf"]
Dec 05 11:26:29 crc kubenswrapper[4728]: W1205 11:26:29.609135 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod854e2a55_450f_48e7_93fb_fca327f4fd18.slice/crio-28f94304f07f46f74c33589e57e9e32146f770efd7a11de01fac144a9c1d2980 WatchSource:0}: Error finding container 28f94304f07f46f74c33589e57e9e32146f770efd7a11de01fac144a9c1d2980: Status 404 returned error can't find the container with id 28f94304f07f46f74c33589e57e9e32146f770efd7a11de01fac144a9c1d2980
Dec 05 11:26:29 crc kubenswrapper[4728]: I1205 11:26:29.611366 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-xnq92"]
Dec 05 11:26:29 crc kubenswrapper[4728]: E1205 11:26:29.627685 4728 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-26wwb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-76cc84c6bb-gbdj9_openstack-operators(c16435ec-544a-4d19-8667-925c045ecf61): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 05 11:26:29 crc kubenswrapper[4728]: W1205 11:26:29.630218 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddd2fed26_0e5c_49e0_ad15_3936a13680e7.slice/crio-0ae9982fdd110dd68a2d8f771351430009dc3ab2e723645311ee8eb317c10764 WatchSource:0}: Error finding container 0ae9982fdd110dd68a2d8f771351430009dc3ab2e723645311ee8eb317c10764: Status 404 returned error can't find the container with id 0ae9982fdd110dd68a2d8f771351430009dc3ab2e723645311ee8eb317c10764
Dec 05 11:26:29 crc kubenswrapper[4728]: E1205 11:26:29.631168 4728 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-26wwb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-76cc84c6bb-gbdj9_openstack-operators(c16435ec-544a-4d19-8667-925c045ecf61): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
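"pull QPS exceeded" comes from kubelet's image-pull rate limiter, a token bucket configured by registryPullQPS and registryBurst in the kubelet configuration (5 QPS with a burst of 10 by default; the defaults are an assumption here, since this log does not show the node's config). With roughly twenty operator pods scheduled in the same instant, the bucket drains and the remaining pulls fail immediately with ErrImagePull, to be retried under backoff. A toy token-bucket sketch using golang.org/x/time/rate to show the shape of the failure:

    package main

    import (
    	"fmt"

    	"golang.org/x/time/rate"
    )

    func main() {
    	// registryPullQPS=5, registryBurst=10 (assumed kubelet defaults).
    	limiter := rate.NewLimiter(5, 10)
    	throttled := 0
    	for i := 0; i < 20; i++ { // ~20 operator images requested at once, as above
    		if !limiter.Allow() {
    			throttled++ // kubelet surfaces this as ErrImagePull: "pull QPS exceeded"
    		}
    	}
    	fmt.Printf("%d of 20 pulls throttled in the same instant\n", throttled)
    }

This also explains why the failures below are transient: nothing is wrong with the images or the registry, the node simply refused to start that many pulls in one burst.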
Dec 05 11:26:29 crc kubenswrapper[4728]: E1205 11:26:29.632316 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-gbdj9" podUID="c16435ec-544a-4d19-8667-925c045ecf61"
Dec 05 11:26:29 crc kubenswrapper[4728]: E1205 11:26:29.634027 4728 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/designate-operator@sha256:9f68d7bc8c6bce38f46dee8a8272d5365c49fe7b32b2af52e8ac884e212f3a85,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-kgrrf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod designate-operator-controller-manager-78b4bc895b-dnlfm_openstack-operators(df0f8091-3107-4a49-9672-8332e4c1f8c0): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 05 11:26:29 crc kubenswrapper[4728]: E1205 11:26:29.634034 4728 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-9ljzk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-78f8948974-xnq92_openstack-operators(dd2fed26-0e5c-49e0-ad15-3936a13680e7): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 05 11:26:29 crc kubenswrapper[4728]: E1205 11:26:29.636321 4728 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-9ljzk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-78f8948974-xnq92_openstack-operators(dd2fed26-0e5c-49e0-ad15-3936a13680e7): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 05 11:26:29 crc kubenswrapper[4728]: E1205 11:26:29.636387 4728 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-kgrrf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod designate-operator-controller-manager-78b4bc895b-dnlfm_openstack-operators(df0f8091-3107-4a49-9672-8332e4c1f8c0): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 05 11:26:29 crc kubenswrapper[4728]: E1205 11:26:29.637513 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-dnlfm" podUID="df0f8091-3107-4a49-9672-8332e4c1f8c0"
Dec 05 11:26:29 crc kubenswrapper[4728]: E1205 11:26:29.637535 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/placement-operator-controller-manager-78f8948974-xnq92" podUID="dd2fed26-0e5c-49e0-ad15-3936a13680e7"
pod="openstack-operators/placement-operator-controller-manager-78f8948974-xnq92" podUID="dd2fed26-0e5c-49e0-ad15-3936a13680e7" Dec 05 11:26:29 crc kubenswrapper[4728]: I1205 11:26:29.732332 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-pqmth"] Dec 05 11:26:29 crc kubenswrapper[4728]: I1205 11:26:29.737761 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-tc4ks"] Dec 05 11:26:29 crc kubenswrapper[4728]: I1205 11:26:29.745670 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-sq989"] Dec 05 11:26:29 crc kubenswrapper[4728]: W1205 11:26:29.746461 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5d7b3b1e_b8a3_4477_8d37_cfd6db1cf27d.slice/crio-40432c02998f4ead5d59a29a4b064db50c951f19e88113f57304562d5cd3a716 WatchSource:0}: Error finding container 40432c02998f4ead5d59a29a4b064db50c951f19e88113f57304562d5cd3a716: Status 404 returned error can't find the container with id 40432c02998f4ead5d59a29a4b064db50c951f19e88113f57304562d5cd3a716 Dec 05 11:26:29 crc kubenswrapper[4728]: E1205 11:26:29.748758 4728 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-fbx5l,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-sq989_openstack-operators(5d7b3b1e-b8a3-4477-8d37-cfd6db1cf27d): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 11:26:29 crc kubenswrapper[4728]: E1205 11:26:29.751321 4728 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-fbx5l,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-sq989_openstack-operators(5d7b3b1e-b8a3-4477-8d37-cfd6db1cf27d): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 11:26:29 crc kubenswrapper[4728]: E1205 11:26:29.752622 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-sq989" podUID="5d7b3b1e-b8a3-4477-8d37-cfd6db1cf27d" Dec 05 11:26:29 crc kubenswrapper[4728]: W1205 11:26:29.752992 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda5b101e4_a4f7_4c73_8327_e09cce07eb51.slice/crio-222b2978102e843c3d2333b8958e673d296d1168d4b6c367316a69cf6189a231 WatchSource:0}: Error finding container 222b2978102e843c3d2333b8958e673d296d1168d4b6c367316a69cf6189a231: Status 404 returned 
error can't find the container with id 222b2978102e843c3d2333b8958e673d296d1168d4b6c367316a69cf6189a231 Dec 05 11:26:29 crc kubenswrapper[4728]: E1205 11:26:29.756111 4728 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-jbf7n,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-769dc69bc-tc4ks_openstack-operators(a5b101e4-a4f7-4c73-8327-e09cce07eb51): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 11:26:29 crc kubenswrapper[4728]: E1205 11:26:29.757778 4728 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-jbf7n,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-769dc69bc-tc4ks_openstack-operators(a5b101e4-a4f7-4c73-8327-e09cce07eb51): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 11:26:29 crc kubenswrapper[4728]: E1205 11:26:29.759017 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-tc4ks" podUID="a5b101e4-a4f7-4c73-8327-e09cce07eb51" Dec 05 11:26:29 crc kubenswrapper[4728]: I1205 11:26:29.780247 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-sb7lq"] Dec 05 11:26:29 crc kubenswrapper[4728]: W1205 11:26:29.789821 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod04a349f4_b388_4a9c_8dbc_54bd1fb46934.slice/crio-6dea6a221927662b8df72924998d4b6dc0c47e31757506ff8b215a0e542c7d09 WatchSource:0}: Error finding container 6dea6a221927662b8df72924998d4b6dc0c47e31757506ff8b215a0e542c7d09: Status 404 returned error can't find the container with id 6dea6a221927662b8df72924998d4b6dc0c47e31757506ff8b215a0e542c7d09 Dec 05 11:26:29 crc kubenswrapper[4728]: E1205 11:26:29.792377 4728 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
Dec 05 11:26:29 crc kubenswrapper[4728]: E1205 11:26:29.792377 4728 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-psf48,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-sb7lq_openstack-operators(04a349f4-b388-4a9c-8dbc-54bd1fb46934): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 05 11:26:29 crc kubenswrapper[4728]: E1205 11:26:29.793552 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-sb7lq" podUID="04a349f4-b388-4a9c-8dbc-54bd1fb46934"
Dec 05 11:26:29 crc kubenswrapper[4728]: I1205 11:26:29.819249 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a1c012ce-e23c-4235-b2b2-56306e3d4722-cert\") pod \"infra-operator-controller-manager-57548d458d-c2s6r\" (UID: \"a1c012ce-e23c-4235-b2b2-56306e3d4722\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-c2s6r"
Dec 05 11:26:29 crc kubenswrapper[4728]: E1205 11:26:29.819449 4728 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found
Dec 05 11:26:29 crc kubenswrapper[4728]: E1205 11:26:29.819526 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a1c012ce-e23c-4235-b2b2-56306e3d4722-cert podName:a1c012ce-e23c-4235-b2b2-56306e3d4722 nodeName:}" failed. No retries permitted until 2025-12-05 11:26:31.819508979 +0000 UTC m=+1125.961631672 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/a1c012ce-e23c-4235-b2b2-56306e3d4722-cert") pod "infra-operator-controller-manager-57548d458d-c2s6r" (UID: "a1c012ce-e23c-4235-b2b2-56306e3d4722") : secret "infra-operator-webhook-server-cert" not found
Dec 05 11:26:30 crc kubenswrapper[4728]: I1205 11:26:30.026040 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6ee43140-9d2f-42c8-917f-eaa028a8e1b1-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4wlcvm\" (UID: \"6ee43140-9d2f-42c8-917f-eaa028a8e1b1\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4wlcvm"
Dec 05 11:26:30 crc kubenswrapper[4728]: E1205 11:26:30.026463 4728 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 05 11:26:30 crc kubenswrapper[4728]: E1205 11:26:30.026517 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6ee43140-9d2f-42c8-917f-eaa028a8e1b1-cert podName:6ee43140-9d2f-42c8-917f-eaa028a8e1b1 nodeName:}" failed. No retries permitted until 2025-12-05 11:26:32.026502119 +0000 UTC m=+1126.168624812 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/6ee43140-9d2f-42c8-917f-eaa028a8e1b1-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4wlcvm" (UID: "6ee43140-9d2f-42c8-917f-eaa028a8e1b1") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 05 11:26:30 crc kubenswrapper[4728]: I1205 11:26:30.431948 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/891e8e93-da9a-4b87-8e69-04fe149274cd-metrics-certs\") pod \"openstack-operator-controller-manager-64b69b8785-cvs4m\" (UID: \"891e8e93-da9a-4b87-8e69-04fe149274cd\") " pod="openstack-operators/openstack-operator-controller-manager-64b69b8785-cvs4m"
Dec 05 11:26:30 crc kubenswrapper[4728]: I1205 11:26:30.432048 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/891e8e93-da9a-4b87-8e69-04fe149274cd-webhook-certs\") pod \"openstack-operator-controller-manager-64b69b8785-cvs4m\" (UID: \"891e8e93-da9a-4b87-8e69-04fe149274cd\") " pod="openstack-operators/openstack-operator-controller-manager-64b69b8785-cvs4m"
Dec 05 11:26:30 crc kubenswrapper[4728]: E1205 11:26:30.432142 4728 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Dec 05 11:26:30 crc kubenswrapper[4728]: E1205 11:26:30.432187 4728 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Dec 05 11:26:30 crc kubenswrapper[4728]: E1205 11:26:30.432222 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/891e8e93-da9a-4b87-8e69-04fe149274cd-metrics-certs podName:891e8e93-da9a-4b87-8e69-04fe149274cd nodeName:}" failed. No retries permitted until 2025-12-05 11:26:32.432201657 +0000 UTC m=+1126.574324350 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/891e8e93-da9a-4b87-8e69-04fe149274cd-metrics-certs") pod "openstack-operator-controller-manager-64b69b8785-cvs4m" (UID: "891e8e93-da9a-4b87-8e69-04fe149274cd") : secret "metrics-server-cert" not found
Dec 05 11:26:30 crc kubenswrapper[4728]: E1205 11:26:30.432280 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/891e8e93-da9a-4b87-8e69-04fe149274cd-webhook-certs podName:891e8e93-da9a-4b87-8e69-04fe149274cd nodeName:}" failed. No retries permitted until 2025-12-05 11:26:32.432253169 +0000 UTC m=+1126.574375862 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/891e8e93-da9a-4b87-8e69-04fe149274cd-webhook-certs") pod "openstack-operator-controller-manager-64b69b8785-cvs4m" (UID: "891e8e93-da9a-4b87-8e69-04fe149274cd") : secret "webhook-server-cert" not found
Dec 05 11:26:30 crc kubenswrapper[4728]: I1205 11:26:30.513144 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-sb7lq" event={"ID":"04a349f4-b388-4a9c-8dbc-54bd1fb46934","Type":"ContainerStarted","Data":"6dea6a221927662b8df72924998d4b6dc0c47e31757506ff8b215a0e542c7d09"}
Dec 05 11:26:30 crc kubenswrapper[4728]: E1205 11:26:30.516232 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-sb7lq" podUID="04a349f4-b388-4a9c-8dbc-54bd1fb46934"
Dec 05 11:26:30 crc kubenswrapper[4728]: I1205 11:26:30.520615 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-8fvcf" event={"ID":"45a6cdaf-b12d-4f86-b6ce-7761cfd5aee6","Type":"ContainerStarted","Data":"b334a8fe9dbd3c91c623a4b420db8a29f9bde08cc35e853335f4b0300b6ce70a"}
Dec 05 11:26:30 crc kubenswrapper[4728]: I1205 11:26:30.522066 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-gbdj9" event={"ID":"c16435ec-544a-4d19-8667-925c045ecf61","Type":"ContainerStarted","Data":"ea3b3f719d1f007a75f0853e933da8375353719d01a1e665d922c9dac251a609"}
Dec 05 11:26:30 crc kubenswrapper[4728]: E1205 11:26:30.527163 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-gbdj9" podUID="c16435ec-544a-4d19-8667-925c045ecf61"
Dec 05 11:26:30 crc kubenswrapper[4728]: I1205 11:26:30.527999 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-h6czw" event={"ID":"db8744a4-edde-4a54-85e9-05089f650ba0","Type":"ContainerStarted","Data":"a725aae32c5fa182b5786b5a0abe09c0e5ebabfc04cffd8968ce7347f09878de"}
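From this point the throttled pulls report ImagePullBackOff rather than ErrImagePull: kubelet remembers the failed pull per container and waits out a backoff before trying again, which is what the "Back-off pulling image" entries show. A client-go sketch, under the same kubeconfig assumption as earlier, that lists which containers in the namespace are currently stuck in that state:

    package main

    import (
    	"context"
    	"fmt"

    	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
    	"k8s.io/client-go/kubernetes"
    	"k8s.io/client-go/tools/clientcmd"
    )

    func main() {
    	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
    	if err != nil {
    		panic(err)
    	}
    	cs := kubernetes.NewForConfigOrDie(cfg)
    	pods, err := cs.CoreV1().Pods("openstack-operators").List(context.TODO(), metav1.ListOptions{})
    	if err != nil {
    		panic(err)
    	}
    	for _, p := range pods.Items {
    		for _, st := range p.Status.ContainerStatuses {
    			// Reason strings match what pod_workers reports in the log.
    			if st.State.Waiting != nil && st.State.Waiting.Reason == "ImagePullBackOff" {
    				fmt.Printf("%s/%s: %s\n", p.Name, st.Name, st.State.Waiting.Message)
    			}
    		}
    	}
    }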
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-tc4ks" event={"ID":"a5b101e4-a4f7-4c73-8327-e09cce07eb51","Type":"ContainerStarted","Data":"222b2978102e843c3d2333b8958e673d296d1168d4b6c367316a69cf6189a231"} Dec 05 11:26:30 crc kubenswrapper[4728]: I1205 11:26:30.537265 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-sq989" event={"ID":"5d7b3b1e-b8a3-4477-8d37-cfd6db1cf27d","Type":"ContainerStarted","Data":"40432c02998f4ead5d59a29a4b064db50c951f19e88113f57304562d5cd3a716"} Dec 05 11:26:30 crc kubenswrapper[4728]: E1205 11:26:30.542591 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-sq989" podUID="5d7b3b1e-b8a3-4477-8d37-cfd6db1cf27d" Dec 05 11:26:30 crc kubenswrapper[4728]: I1205 11:26:30.543353 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-t7lpc" event={"ID":"854e2a55-450f-48e7-93fb-fca327f4fd18","Type":"ContainerStarted","Data":"28f94304f07f46f74c33589e57e9e32146f770efd7a11de01fac144a9c1d2980"} Dec 05 11:26:30 crc kubenswrapper[4728]: E1205 11:26:30.543542 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-tc4ks" podUID="a5b101e4-a4f7-4c73-8327-e09cce07eb51" Dec 05 11:26:30 crc kubenswrapper[4728]: I1205 11:26:30.545142 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-sdqtl" event={"ID":"98cd0df5-f4a5-4515-80b5-d0ac625a527a","Type":"ContainerStarted","Data":"6cc950fba6c0201acb69fd5967525a340324685d02594ba2464268c004c9f263"} Dec 05 11:26:30 crc kubenswrapper[4728]: I1205 11:26:30.557528 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-pqmth" event={"ID":"b5925d20-e10a-4564-91f4-67acb55b2a01","Type":"ContainerStarted","Data":"d97246375cc6563d6366487fc9d96ad7fac2457e25f913a9b4834f8218f6512f"} Dec 05 11:26:30 crc kubenswrapper[4728]: I1205 11:26:30.561183 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-xjwhb" event={"ID":"ce9ea80b-ff4e-49a8-a1b6-7f7ea21dbf10","Type":"ContainerStarted","Data":"dec7058555ff657af6836372d94419eea9c758fd44cca177738a2f195571f011"} Dec 05 11:26:30 crc kubenswrapper[4728]: I1205 11:26:30.562676 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-xnq92" 
event={"ID":"dd2fed26-0e5c-49e0-ad15-3936a13680e7","Type":"ContainerStarted","Data":"0ae9982fdd110dd68a2d8f771351430009dc3ab2e723645311ee8eb317c10764"} Dec 05 11:26:30 crc kubenswrapper[4728]: E1205 11:26:30.568089 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/placement-operator-controller-manager-78f8948974-xnq92" podUID="dd2fed26-0e5c-49e0-ad15-3936a13680e7" Dec 05 11:26:30 crc kubenswrapper[4728]: I1205 11:26:30.569213 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-dnlfm" event={"ID":"df0f8091-3107-4a49-9672-8332e4c1f8c0","Type":"ContainerStarted","Data":"08fda792be0852cdf2731f38dfa57bcd08c0c965d590591c4e3e3cede8e7d85f"} Dec 05 11:26:30 crc kubenswrapper[4728]: E1205 11:26:30.595891 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/designate-operator@sha256:9f68d7bc8c6bce38f46dee8a8272d5365c49fe7b32b2af52e8ac884e212f3a85\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-dnlfm" podUID="df0f8091-3107-4a49-9672-8332e4c1f8c0" Dec 05 11:26:31 crc kubenswrapper[4728]: E1205 11:26:31.582869 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-gbdj9" podUID="c16435ec-544a-4d19-8667-925c045ecf61" Dec 05 11:26:31 crc kubenswrapper[4728]: E1205 11:26:31.582909 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-sb7lq" podUID="04a349f4-b388-4a9c-8dbc-54bd1fb46934" Dec 05 11:26:31 crc kubenswrapper[4728]: E1205 11:26:31.582992 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-tc4ks" 
podUID="a5b101e4-a4f7-4c73-8327-e09cce07eb51" Dec 05 11:26:31 crc kubenswrapper[4728]: E1205 11:26:31.584782 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/placement-operator-controller-manager-78f8948974-xnq92" podUID="dd2fed26-0e5c-49e0-ad15-3936a13680e7" Dec 05 11:26:31 crc kubenswrapper[4728]: E1205 11:26:31.584903 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-sq989" podUID="5d7b3b1e-b8a3-4477-8d37-cfd6db1cf27d" Dec 05 11:26:31 crc kubenswrapper[4728]: E1205 11:26:31.587159 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/designate-operator@sha256:9f68d7bc8c6bce38f46dee8a8272d5365c49fe7b32b2af52e8ac884e212f3a85\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-dnlfm" podUID="df0f8091-3107-4a49-9672-8332e4c1f8c0" Dec 05 11:26:31 crc kubenswrapper[4728]: I1205 11:26:31.854093 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a1c012ce-e23c-4235-b2b2-56306e3d4722-cert\") pod \"infra-operator-controller-manager-57548d458d-c2s6r\" (UID: \"a1c012ce-e23c-4235-b2b2-56306e3d4722\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-c2s6r" Dec 05 11:26:31 crc kubenswrapper[4728]: E1205 11:26:31.854263 4728 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 05 11:26:31 crc kubenswrapper[4728]: E1205 11:26:31.854618 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a1c012ce-e23c-4235-b2b2-56306e3d4722-cert podName:a1c012ce-e23c-4235-b2b2-56306e3d4722 nodeName:}" failed. No retries permitted until 2025-12-05 11:26:35.854593475 +0000 UTC m=+1129.996716168 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/a1c012ce-e23c-4235-b2b2-56306e3d4722-cert") pod "infra-operator-controller-manager-57548d458d-c2s6r" (UID: "a1c012ce-e23c-4235-b2b2-56306e3d4722") : secret "infra-operator-webhook-server-cert" not found Dec 05 11:26:32 crc kubenswrapper[4728]: I1205 11:26:32.057658 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6ee43140-9d2f-42c8-917f-eaa028a8e1b1-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4wlcvm\" (UID: \"6ee43140-9d2f-42c8-917f-eaa028a8e1b1\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4wlcvm" Dec 05 11:26:32 crc kubenswrapper[4728]: E1205 11:26:32.058107 4728 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 11:26:32 crc kubenswrapper[4728]: E1205 11:26:32.058217 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6ee43140-9d2f-42c8-917f-eaa028a8e1b1-cert podName:6ee43140-9d2f-42c8-917f-eaa028a8e1b1 nodeName:}" failed. No retries permitted until 2025-12-05 11:26:36.058185784 +0000 UTC m=+1130.200308517 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/6ee43140-9d2f-42c8-917f-eaa028a8e1b1-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4wlcvm" (UID: "6ee43140-9d2f-42c8-917f-eaa028a8e1b1") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 11:26:32 crc kubenswrapper[4728]: I1205 11:26:32.463093 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/891e8e93-da9a-4b87-8e69-04fe149274cd-metrics-certs\") pod \"openstack-operator-controller-manager-64b69b8785-cvs4m\" (UID: \"891e8e93-da9a-4b87-8e69-04fe149274cd\") " pod="openstack-operators/openstack-operator-controller-manager-64b69b8785-cvs4m" Dec 05 11:26:32 crc kubenswrapper[4728]: I1205 11:26:32.463215 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/891e8e93-da9a-4b87-8e69-04fe149274cd-webhook-certs\") pod \"openstack-operator-controller-manager-64b69b8785-cvs4m\" (UID: \"891e8e93-da9a-4b87-8e69-04fe149274cd\") " pod="openstack-operators/openstack-operator-controller-manager-64b69b8785-cvs4m" Dec 05 11:26:32 crc kubenswrapper[4728]: E1205 11:26:32.463306 4728 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 05 11:26:32 crc kubenswrapper[4728]: E1205 11:26:32.463387 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/891e8e93-da9a-4b87-8e69-04fe149274cd-metrics-certs podName:891e8e93-da9a-4b87-8e69-04fe149274cd nodeName:}" failed. No retries permitted until 2025-12-05 11:26:36.463367338 +0000 UTC m=+1130.605490031 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/891e8e93-da9a-4b87-8e69-04fe149274cd-metrics-certs") pod "openstack-operator-controller-manager-64b69b8785-cvs4m" (UID: "891e8e93-da9a-4b87-8e69-04fe149274cd") : secret "metrics-server-cert" not found Dec 05 11:26:32 crc kubenswrapper[4728]: E1205 11:26:32.463417 4728 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 05 11:26:32 crc kubenswrapper[4728]: E1205 11:26:32.463483 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/891e8e93-da9a-4b87-8e69-04fe149274cd-webhook-certs podName:891e8e93-da9a-4b87-8e69-04fe149274cd nodeName:}" failed. No retries permitted until 2025-12-05 11:26:36.46346154 +0000 UTC m=+1130.605584243 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/891e8e93-da9a-4b87-8e69-04fe149274cd-webhook-certs") pod "openstack-operator-controller-manager-64b69b8785-cvs4m" (UID: "891e8e93-da9a-4b87-8e69-04fe149274cd") : secret "webhook-server-cert" not found Dec 05 11:26:35 crc kubenswrapper[4728]: I1205 11:26:35.911478 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a1c012ce-e23c-4235-b2b2-56306e3d4722-cert\") pod \"infra-operator-controller-manager-57548d458d-c2s6r\" (UID: \"a1c012ce-e23c-4235-b2b2-56306e3d4722\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-c2s6r" Dec 05 11:26:35 crc kubenswrapper[4728]: E1205 11:26:35.911686 4728 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 05 11:26:35 crc kubenswrapper[4728]: E1205 11:26:35.912009 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a1c012ce-e23c-4235-b2b2-56306e3d4722-cert podName:a1c012ce-e23c-4235-b2b2-56306e3d4722 nodeName:}" failed. No retries permitted until 2025-12-05 11:26:43.911981724 +0000 UTC m=+1138.054104417 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/a1c012ce-e23c-4235-b2b2-56306e3d4722-cert") pod "infra-operator-controller-manager-57548d458d-c2s6r" (UID: "a1c012ce-e23c-4235-b2b2-56306e3d4722") : secret "infra-operator-webhook-server-cert" not found Dec 05 11:26:36 crc kubenswrapper[4728]: I1205 11:26:36.114744 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6ee43140-9d2f-42c8-917f-eaa028a8e1b1-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4wlcvm\" (UID: \"6ee43140-9d2f-42c8-917f-eaa028a8e1b1\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4wlcvm" Dec 05 11:26:36 crc kubenswrapper[4728]: E1205 11:26:36.114995 4728 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 11:26:36 crc kubenswrapper[4728]: E1205 11:26:36.115101 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6ee43140-9d2f-42c8-917f-eaa028a8e1b1-cert podName:6ee43140-9d2f-42c8-917f-eaa028a8e1b1 nodeName:}" failed. No retries permitted until 2025-12-05 11:26:44.115078 +0000 UTC m=+1138.257200733 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/6ee43140-9d2f-42c8-917f-eaa028a8e1b1-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4wlcvm" (UID: "6ee43140-9d2f-42c8-917f-eaa028a8e1b1") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 11:26:36 crc kubenswrapper[4728]: I1205 11:26:36.530653 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/891e8e93-da9a-4b87-8e69-04fe149274cd-metrics-certs\") pod \"openstack-operator-controller-manager-64b69b8785-cvs4m\" (UID: \"891e8e93-da9a-4b87-8e69-04fe149274cd\") " pod="openstack-operators/openstack-operator-controller-manager-64b69b8785-cvs4m" Dec 05 11:26:36 crc kubenswrapper[4728]: I1205 11:26:36.530786 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/891e8e93-da9a-4b87-8e69-04fe149274cd-webhook-certs\") pod \"openstack-operator-controller-manager-64b69b8785-cvs4m\" (UID: \"891e8e93-da9a-4b87-8e69-04fe149274cd\") " pod="openstack-operators/openstack-operator-controller-manager-64b69b8785-cvs4m" Dec 05 11:26:36 crc kubenswrapper[4728]: E1205 11:26:36.531032 4728 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 05 11:26:36 crc kubenswrapper[4728]: E1205 11:26:36.531097 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/891e8e93-da9a-4b87-8e69-04fe149274cd-webhook-certs podName:891e8e93-da9a-4b87-8e69-04fe149274cd nodeName:}" failed. No retries permitted until 2025-12-05 11:26:44.531075625 +0000 UTC m=+1138.673198318 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/891e8e93-da9a-4b87-8e69-04fe149274cd-webhook-certs") pod "openstack-operator-controller-manager-64b69b8785-cvs4m" (UID: "891e8e93-da9a-4b87-8e69-04fe149274cd") : secret "webhook-server-cert" not found Dec 05 11:26:36 crc kubenswrapper[4728]: E1205 11:26:36.531625 4728 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 05 11:26:36 crc kubenswrapper[4728]: E1205 11:26:36.531735 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/891e8e93-da9a-4b87-8e69-04fe149274cd-metrics-certs podName:891e8e93-da9a-4b87-8e69-04fe149274cd nodeName:}" failed. No retries permitted until 2025-12-05 11:26:44.531711573 +0000 UTC m=+1138.673834276 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/891e8e93-da9a-4b87-8e69-04fe149274cd-metrics-certs") pod "openstack-operator-controller-manager-64b69b8785-cvs4m" (UID: "891e8e93-da9a-4b87-8e69-04fe149274cd") : secret "metrics-server-cert" not found Dec 05 11:26:41 crc kubenswrapper[4728]: E1205 11:26:41.495046 4728 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7" Dec 05 11:26:41 crc kubenswrapper[4728]: E1205 11:26:41.495735 4728 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5gw8k,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-7765d96ddf-6wcpq_openstack-operators(6f5ec4c9-95e8-43ea-a137-9c781e4f234f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 11:26:42 crc kubenswrapper[4728]: E1205 11:26:42.026360 4728 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670" Dec 05 11:26:42 crc 
kubenswrapper[4728]: E1205 11:26:42.026574 4728 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-dnsrq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-697bc559fc-8fvcf_openstack-operators(45a6cdaf-b12d-4f86-b6ce-7761cfd5aee6): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 11:26:42 crc kubenswrapper[4728]: E1205 11:26:42.587971 4728 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-tnf6v,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod heat-operator-controller-manager-5f64f6f8bb-ghbcj_openstack-operators(403718e0-87fa-402a-844e-6b458a15b003): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 11:26:42 crc kubenswrapper[4728]: E1205 11:26:42.589672 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-ghbcj" podUID="403718e0-87fa-402a-844e-6b458a15b003" Dec 05 11:26:42 crc kubenswrapper[4728]: I1205 11:26:42.687426 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-xjwhb" event={"ID":"ce9ea80b-ff4e-49a8-a1b6-7f7ea21dbf10","Type":"ContainerStarted","Data":"85e91561c4fa0d32d489dc564e1cccaab496b8be1e234ce1ba1329864a127e72"} Dec 05 11:26:42 crc kubenswrapper[4728]: I1205 11:26:42.700045 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-zqc5j" event={"ID":"941ddd04-049e-4247-98c2-6ef2117c2c69","Type":"ContainerStarted","Data":"18f237a596b27225d5065565655e91e3b146a0e5ff6c1d75bc41b61bc10307af"} Dec 05 11:26:42 crc kubenswrapper[4728]: I1205 11:26:42.703745 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-2zvrw" event={"ID":"34f5a6c5-a316-450d-83a1-affbdd4d2e0e","Type":"ContainerStarted","Data":"916e8eb94a0b0eac944b697a864b61fe990b19448396b3ee91dfa5ea0c557f1d"} Dec 05 11:26:42 crc kubenswrapper[4728]: I1205 11:26:42.715072 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-t7lpc" event={"ID":"854e2a55-450f-48e7-93fb-fca327f4fd18","Type":"ContainerStarted","Data":"8531c876ad27b865a700b34a56c7e8ae38aaead682795501a4e0ddc50be67c3a"} Dec 05 11:26:42 crc kubenswrapper[4728]: I1205 11:26:42.723475 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-ghbcj" event={"ID":"403718e0-87fa-402a-844e-6b458a15b003","Type":"ContainerStarted","Data":"c87de93315c7d5107a3b75f447007228f1076f789230e5e480f0733520001425"} Dec 05 11:26:42 crc kubenswrapper[4728]: I1205 11:26:42.723577 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-ghbcj" Dec 05 11:26:42 crc kubenswrapper[4728]: E1205 11:26:42.729772 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off 
pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-ghbcj" podUID="403718e0-87fa-402a-844e-6b458a15b003" Dec 05 11:26:42 crc kubenswrapper[4728]: I1205 11:26:42.736348 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-7jg64" event={"ID":"03a7d3e9-4e85-496e-963f-f0c1e7e4cf04","Type":"ContainerStarted","Data":"76ca50614c13ec7bf3e362c630c0c0083523eada5f1798a79c44db5d9a48aaa5"} Dec 05 11:26:42 crc kubenswrapper[4728]: I1205 11:26:42.739300 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-pqmth" event={"ID":"b5925d20-e10a-4564-91f4-67acb55b2a01","Type":"ContainerStarted","Data":"0244961e0cd218007d7fab258fbe938a058ef40dc01dfb6b3c92be047b2fdd5b"} Dec 05 11:26:42 crc kubenswrapper[4728]: I1205 11:26:42.747380 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-btnkh" event={"ID":"5d689dc0-c7c8-4af2-8f4c-45863ab88b69","Type":"ContainerStarted","Data":"fdc0be394ab68fb5957b2d384a54cdcba7c044dc4da431add98e8b78c4b0085b"} Dec 05 11:26:42 crc kubenswrapper[4728]: I1205 11:26:42.752983 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-sdqtl" event={"ID":"98cd0df5-f4a5-4515-80b5-d0ac625a527a","Type":"ContainerStarted","Data":"8a5f7d593a2745fa5f59fa489cf2b0df910cc87132922d83525af1dd32d4c008"} Dec 05 11:26:42 crc kubenswrapper[4728]: I1205 11:26:42.757071 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-nch5j" event={"ID":"3149306c-d64a-4bdf-994a-ecec0489e472","Type":"ContainerStarted","Data":"efd0af706d85214c047daaf192a3332488f235d7aedd822ee4901d77fc7fff4b"} Dec 05 11:26:42 crc kubenswrapper[4728]: I1205 11:26:42.758416 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-h6czw" event={"ID":"db8744a4-edde-4a54-85e9-05089f650ba0","Type":"ContainerStarted","Data":"16cf1ac8dfc3242501d86b366cf70cb3b18cdc8c0bf19f67080eb1b6fe5f3756"} Dec 05 11:26:42 crc kubenswrapper[4728]: I1205 11:26:42.770111 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-n8x6r" event={"ID":"d21af02e-d731-402f-aa09-1f705dc4e82b","Type":"ContainerStarted","Data":"fae1dddfffcd7341fe2224ecca1d7a54108d4612dbfd517529c0f0761d35fe3f"} Dec 05 11:26:43 crc kubenswrapper[4728]: E1205 11:26:43.778520 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-ghbcj" podUID="403718e0-87fa-402a-844e-6b458a15b003" Dec 05 11:26:43 crc kubenswrapper[4728]: I1205 11:26:43.945920 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a1c012ce-e23c-4235-b2b2-56306e3d4722-cert\") pod \"infra-operator-controller-manager-57548d458d-c2s6r\" (UID: \"a1c012ce-e23c-4235-b2b2-56306e3d4722\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-c2s6r" Dec 05 11:26:43 crc kubenswrapper[4728]: I1205 11:26:43.963473 4728 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a1c012ce-e23c-4235-b2b2-56306e3d4722-cert\") pod \"infra-operator-controller-manager-57548d458d-c2s6r\" (UID: \"a1c012ce-e23c-4235-b2b2-56306e3d4722\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-c2s6r" Dec 05 11:26:44 crc kubenswrapper[4728]: I1205 11:26:44.148891 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6ee43140-9d2f-42c8-917f-eaa028a8e1b1-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4wlcvm\" (UID: \"6ee43140-9d2f-42c8-917f-eaa028a8e1b1\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4wlcvm" Dec 05 11:26:44 crc kubenswrapper[4728]: I1205 11:26:44.154727 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6ee43140-9d2f-42c8-917f-eaa028a8e1b1-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4wlcvm\" (UID: \"6ee43140-9d2f-42c8-917f-eaa028a8e1b1\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4wlcvm" Dec 05 11:26:44 crc kubenswrapper[4728]: I1205 11:26:44.225469 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-c2s6r" Dec 05 11:26:44 crc kubenswrapper[4728]: I1205 11:26:44.311269 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4wlcvm" Dec 05 11:26:44 crc kubenswrapper[4728]: I1205 11:26:44.555672 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/891e8e93-da9a-4b87-8e69-04fe149274cd-metrics-certs\") pod \"openstack-operator-controller-manager-64b69b8785-cvs4m\" (UID: \"891e8e93-da9a-4b87-8e69-04fe149274cd\") " pod="openstack-operators/openstack-operator-controller-manager-64b69b8785-cvs4m" Dec 05 11:26:44 crc kubenswrapper[4728]: I1205 11:26:44.555774 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/891e8e93-da9a-4b87-8e69-04fe149274cd-webhook-certs\") pod \"openstack-operator-controller-manager-64b69b8785-cvs4m\" (UID: \"891e8e93-da9a-4b87-8e69-04fe149274cd\") " pod="openstack-operators/openstack-operator-controller-manager-64b69b8785-cvs4m" Dec 05 11:26:44 crc kubenswrapper[4728]: I1205 11:26:44.559607 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/891e8e93-da9a-4b87-8e69-04fe149274cd-webhook-certs\") pod \"openstack-operator-controller-manager-64b69b8785-cvs4m\" (UID: \"891e8e93-da9a-4b87-8e69-04fe149274cd\") " pod="openstack-operators/openstack-operator-controller-manager-64b69b8785-cvs4m" Dec 05 11:26:44 crc kubenswrapper[4728]: I1205 11:26:44.559974 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/891e8e93-da9a-4b87-8e69-04fe149274cd-metrics-certs\") pod \"openstack-operator-controller-manager-64b69b8785-cvs4m\" (UID: \"891e8e93-da9a-4b87-8e69-04fe149274cd\") " pod="openstack-operators/openstack-operator-controller-manager-64b69b8785-cvs4m" Dec 05 11:26:44 crc kubenswrapper[4728]: I1205 11:26:44.860200 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-64b69b8785-cvs4m" Dec 05 11:26:48 crc kubenswrapper[4728]: I1205 11:26:48.546219 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-ghbcj" Dec 05 11:26:48 crc kubenswrapper[4728]: E1205 11:26:48.549293 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-ghbcj" podUID="403718e0-87fa-402a-844e-6b458a15b003" Dec 05 11:26:53 crc kubenswrapper[4728]: I1205 11:26:53.161924 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-64b69b8785-cvs4m"] Dec 05 11:26:53 crc kubenswrapper[4728]: I1205 11:26:53.477685 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4wlcvm"] Dec 05 11:26:53 crc kubenswrapper[4728]: I1205 11:26:53.506306 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-c2s6r"] Dec 05 11:26:53 crc kubenswrapper[4728]: W1205 11:26:53.760964 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod891e8e93_da9a_4b87_8e69_04fe149274cd.slice/crio-e408e6f4f1d70c202540245a5b11f1277146a3783f4d2e64561e86c5d9f16f3b WatchSource:0}: Error finding container e408e6f4f1d70c202540245a5b11f1277146a3783f4d2e64561e86c5d9f16f3b: Status 404 returned error can't find the container with id e408e6f4f1d70c202540245a5b11f1277146a3783f4d2e64561e86c5d9f16f3b Dec 05 11:26:53 crc kubenswrapper[4728]: W1205 11:26:53.761885 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda1c012ce_e23c_4235_b2b2_56306e3d4722.slice/crio-031dfe9fba195ab0e822929d55fa6b224e6411bee7a1a2aaf243d3a052836f87 WatchSource:0}: Error finding container 031dfe9fba195ab0e822929d55fa6b224e6411bee7a1a2aaf243d3a052836f87: Status 404 returned error can't find the container with id 031dfe9fba195ab0e822929d55fa6b224e6411bee7a1a2aaf243d3a052836f87 Dec 05 11:26:53 crc kubenswrapper[4728]: W1205 11:26:53.765818 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6ee43140_9d2f_42c8_917f_eaa028a8e1b1.slice/crio-ee5656188a058ba0d4ad67f41511ad757bda205d26924498813939d5c6aa6bfd WatchSource:0}: Error finding container ee5656188a058ba0d4ad67f41511ad757bda205d26924498813939d5c6aa6bfd: Status 404 returned error can't find the container with id ee5656188a058ba0d4ad67f41511ad757bda205d26924498813939d5c6aa6bfd Dec 05 11:26:53 crc kubenswrapper[4728]: I1205 11:26:53.850612 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4wlcvm" event={"ID":"6ee43140-9d2f-42c8-917f-eaa028a8e1b1","Type":"ContainerStarted","Data":"ee5656188a058ba0d4ad67f41511ad757bda205d26924498813939d5c6aa6bfd"} Dec 05 11:26:53 crc kubenswrapper[4728]: I1205 11:26:53.851855 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-64b69b8785-cvs4m" 
event={"ID":"891e8e93-da9a-4b87-8e69-04fe149274cd","Type":"ContainerStarted","Data":"e408e6f4f1d70c202540245a5b11f1277146a3783f4d2e64561e86c5d9f16f3b"} Dec 05 11:26:53 crc kubenswrapper[4728]: I1205 11:26:53.852883 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-c2s6r" event={"ID":"a1c012ce-e23c-4235-b2b2-56306e3d4722","Type":"ContainerStarted","Data":"031dfe9fba195ab0e822929d55fa6b224e6411bee7a1a2aaf243d3a052836f87"} Dec 05 11:26:59 crc kubenswrapper[4728]: E1205 11:26:59.680561 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-6wcpq" podUID="6f5ec4c9-95e8-43ea-a137-9c781e4f234f" Dec 05 11:27:00 crc kubenswrapper[4728]: E1205 11:27:00.258492 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-8fvcf" podUID="45a6cdaf-b12d-4f86-b6ce-7761cfd5aee6" Dec 05 11:27:00 crc kubenswrapper[4728]: I1205 11:27:00.397568 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-64b69b8785-cvs4m" event={"ID":"891e8e93-da9a-4b87-8e69-04fe149274cd","Type":"ContainerStarted","Data":"bc2eb3b8ec196f20ce27108c9ca8fffb3294c63402d1c74de053050e01e6d15f"} Dec 05 11:27:00 crc kubenswrapper[4728]: I1205 11:27:00.398477 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-64b69b8785-cvs4m" Dec 05 11:27:00 crc kubenswrapper[4728]: I1205 11:27:00.402062 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-c2s6r" event={"ID":"a1c012ce-e23c-4235-b2b2-56306e3d4722","Type":"ContainerStarted","Data":"4a639f675cf606201f93e759c09de8926d1eb92ab39e79b2fd4b404d628c0c92"} Dec 05 11:27:00 crc kubenswrapper[4728]: I1205 11:27:00.405202 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-6wcpq" event={"ID":"6f5ec4c9-95e8-43ea-a137-9c781e4f234f","Type":"ContainerStarted","Data":"41b64fd19e6bd7c47eefcb4e27b089fd0225cab2784def8083e2168959479e6e"} Dec 05 11:27:00 crc kubenswrapper[4728]: I1205 11:27:00.416932 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-2zvrw" event={"ID":"34f5a6c5-a316-450d-83a1-affbdd4d2e0e","Type":"ContainerStarted","Data":"5486a5263eafc11328a2c656a6738a8a5066a72208ab4488e4a83aff5f7d7fae"} Dec 05 11:27:00 crc kubenswrapper[4728]: I1205 11:27:00.418316 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-2zvrw" Dec 05 11:27:00 crc kubenswrapper[4728]: I1205 11:27:00.424768 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-2zvrw" Dec 05 11:27:00 crc kubenswrapper[4728]: I1205 11:27:00.424980 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-sb7lq" 
event={"ID":"04a349f4-b388-4a9c-8dbc-54bd1fb46934","Type":"ContainerStarted","Data":"e507d734b95ef7444fcd57bd66a2d224319fafff2dc784378e7b730e38f69791"} Dec 05 11:27:00 crc kubenswrapper[4728]: I1205 11:27:00.455513 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-64b69b8785-cvs4m" podStartSLOduration=32.455486981 podStartE2EDuration="32.455486981s" podCreationTimestamp="2025-12-05 11:26:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:27:00.4483747 +0000 UTC m=+1154.590497403" watchObservedRunningTime="2025-12-05 11:27:00.455486981 +0000 UTC m=+1154.597609674" Dec 05 11:27:00 crc kubenswrapper[4728]: I1205 11:27:00.455568 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-t7lpc" event={"ID":"854e2a55-450f-48e7-93fb-fca327f4fd18","Type":"ContainerStarted","Data":"cfb890f214c29957e9363f95a379ef9bccff883552f113c9cc76ab0c64ece502"} Dec 05 11:27:00 crc kubenswrapper[4728]: I1205 11:27:00.457169 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-998648c74-t7lpc" Dec 05 11:27:00 crc kubenswrapper[4728]: I1205 11:27:00.466930 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-998648c74-t7lpc" Dec 05 11:27:00 crc kubenswrapper[4728]: I1205 11:27:00.482673 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4wlcvm" event={"ID":"6ee43140-9d2f-42c8-917f-eaa028a8e1b1","Type":"ContainerStarted","Data":"b4545955588f8cd603b4bdbe1efc27189e15fd695561961e6d1f93fd459d8de7"} Dec 05 11:27:00 crc kubenswrapper[4728]: I1205 11:27:00.492682 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-2zvrw" podStartSLOduration=3.522746198 podStartE2EDuration="33.492666684s" podCreationTimestamp="2025-12-05 11:26:27 +0000 UTC" firstStartedPulling="2025-12-05 11:26:29.007600349 +0000 UTC m=+1123.149723042" lastFinishedPulling="2025-12-05 11:26:58.977520835 +0000 UTC m=+1153.119643528" observedRunningTime="2025-12-05 11:27:00.487073193 +0000 UTC m=+1154.629195886" watchObservedRunningTime="2025-12-05 11:27:00.492666684 +0000 UTC m=+1154.634789377" Dec 05 11:27:00 crc kubenswrapper[4728]: I1205 11:27:00.508752 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-n8x6r" event={"ID":"d21af02e-d731-402f-aa09-1f705dc4e82b","Type":"ContainerStarted","Data":"67baf71013f89c4227e9bb7688fd0294ccfb6b7a83b7e5794e633c647882f8b1"} Dec 05 11:27:00 crc kubenswrapper[4728]: I1205 11:27:00.511841 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-n8x6r" Dec 05 11:27:00 crc kubenswrapper[4728]: I1205 11:27:00.516636 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-n8x6r" Dec 05 11:27:00 crc kubenswrapper[4728]: I1205 11:27:00.527148 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-7jg64" 
event={"ID":"03a7d3e9-4e85-496e-963f-f0c1e7e4cf04","Type":"ContainerStarted","Data":"86675561cc38a1bdaed2f596f1cad156330e3068570b72f8947a2f182e5a87da"} Dec 05 11:27:00 crc kubenswrapper[4728]: I1205 11:27:00.529064 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-7jg64" Dec 05 11:27:00 crc kubenswrapper[4728]: I1205 11:27:00.564756 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-7jg64" Dec 05 11:27:00 crc kubenswrapper[4728]: I1205 11:27:00.577160 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-pqmth" event={"ID":"b5925d20-e10a-4564-91f4-67acb55b2a01","Type":"ContainerStarted","Data":"5ec0c0bb5dddeb66b9995e3113b7085a27e861aa4d5fe6a0dfe6e0c45a200497"} Dec 05 11:27:00 crc kubenswrapper[4728]: I1205 11:27:00.578203 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-pqmth" Dec 05 11:27:00 crc kubenswrapper[4728]: I1205 11:27:00.582978 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-pqmth" Dec 05 11:27:00 crc kubenswrapper[4728]: I1205 11:27:00.600855 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-sb7lq" podStartSLOduration=3.3411013609999998 podStartE2EDuration="32.60082984s" podCreationTimestamp="2025-12-05 11:26:28 +0000 UTC" firstStartedPulling="2025-12-05 11:26:29.792227403 +0000 UTC m=+1123.934350096" lastFinishedPulling="2025-12-05 11:26:59.051955882 +0000 UTC m=+1153.194078575" observedRunningTime="2025-12-05 11:27:00.584903861 +0000 UTC m=+1154.727026544" watchObservedRunningTime="2025-12-05 11:27:00.60082984 +0000 UTC m=+1154.742952533" Dec 05 11:27:00 crc kubenswrapper[4728]: I1205 11:27:00.636530 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-nch5j" event={"ID":"3149306c-d64a-4bdf-994a-ecec0489e472","Type":"ContainerStarted","Data":"aad479da61d4247dbe06b8859b1ffbceb914bb57e3003b4d460d9c748a42d547"} Dec 05 11:27:00 crc kubenswrapper[4728]: I1205 11:27:00.638383 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-nch5j" Dec 05 11:27:00 crc kubenswrapper[4728]: I1205 11:27:00.641245 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-pqmth" podStartSLOduration=4.373905526 podStartE2EDuration="32.641196938s" podCreationTimestamp="2025-12-05 11:26:28 +0000 UTC" firstStartedPulling="2025-12-05 11:26:29.74388169 +0000 UTC m=+1123.886004383" lastFinishedPulling="2025-12-05 11:26:58.011173102 +0000 UTC m=+1152.153295795" observedRunningTime="2025-12-05 11:27:00.61491206 +0000 UTC m=+1154.757034753" watchObservedRunningTime="2025-12-05 11:27:00.641196938 +0000 UTC m=+1154.783319651" Dec 05 11:27:00 crc kubenswrapper[4728]: I1205 11:27:00.643820 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-n8x6r" podStartSLOduration=7.432072335 podStartE2EDuration="33.643787758s" podCreationTimestamp="2025-12-05 11:26:27 +0000 UTC" 
firstStartedPulling="2025-12-05 11:26:29.262563483 +0000 UTC m=+1123.404686176" lastFinishedPulling="2025-12-05 11:26:55.474278906 +0000 UTC m=+1149.616401599" observedRunningTime="2025-12-05 11:27:00.636194343 +0000 UTC m=+1154.778317036" watchObservedRunningTime="2025-12-05 11:27:00.643787758 +0000 UTC m=+1154.785910451" Dec 05 11:27:00 crc kubenswrapper[4728]: I1205 11:27:00.644357 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-nch5j" Dec 05 11:27:00 crc kubenswrapper[4728]: I1205 11:27:00.651082 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-zqc5j" event={"ID":"941ddd04-049e-4247-98c2-6ef2117c2c69","Type":"ContainerStarted","Data":"9eefa9b980857adbdbaa90dc2f5c3cf97753d337e11cfe7729d4089f10ff3bc5"} Dec 05 11:27:00 crc kubenswrapper[4728]: I1205 11:27:00.652152 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-zqc5j" Dec 05 11:27:00 crc kubenswrapper[4728]: I1205 11:27:00.655405 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-zqc5j" Dec 05 11:27:00 crc kubenswrapper[4728]: I1205 11:27:00.670144 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-xnq92" event={"ID":"dd2fed26-0e5c-49e0-ad15-3936a13680e7","Type":"ContainerStarted","Data":"f4fbf6156b43ce1f2dacdfb677c5803b45a790fdf95437e3c91510ca08cd8647"} Dec 05 11:27:00 crc kubenswrapper[4728]: I1205 11:27:00.684180 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-998648c74-t7lpc" podStartSLOduration=3.320726181 podStartE2EDuration="32.684160176s" podCreationTimestamp="2025-12-05 11:26:28 +0000 UTC" firstStartedPulling="2025-12-05 11:26:29.61111091 +0000 UTC m=+1123.753233603" lastFinishedPulling="2025-12-05 11:26:58.974544905 +0000 UTC m=+1153.116667598" observedRunningTime="2025-12-05 11:27:00.679923862 +0000 UTC m=+1154.822046575" watchObservedRunningTime="2025-12-05 11:27:00.684160176 +0000 UTC m=+1154.826282869" Dec 05 11:27:00 crc kubenswrapper[4728]: I1205 11:27:00.691122 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-dnlfm" event={"ID":"df0f8091-3107-4a49-9672-8332e4c1f8c0","Type":"ContainerStarted","Data":"c8ec50fb4fbfe03ffb716d94e5e4225f8c3afb3532eca6b371cf36ed6b5dd388"} Dec 05 11:27:00 crc kubenswrapper[4728]: I1205 11:27:00.728470 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-gbdj9" event={"ID":"c16435ec-544a-4d19-8667-925c045ecf61","Type":"ContainerStarted","Data":"2d8861774a0d6d34e39ecfe8ee1fdac579ff817ef71d611f7f4d9329881a4106"} Dec 05 11:27:00 crc kubenswrapper[4728]: I1205 11:27:00.772562 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-zqc5j" podStartSLOduration=3.681438186 podStartE2EDuration="33.772538579s" podCreationTimestamp="2025-12-05 11:26:27 +0000 UTC" firstStartedPulling="2025-12-05 11:26:28.909371161 +0000 UTC m=+1123.051493854" lastFinishedPulling="2025-12-05 11:26:59.000471544 +0000 UTC m=+1153.142594247" observedRunningTime="2025-12-05 
11:27:00.765097489 +0000 UTC m=+1154.907220192" watchObservedRunningTime="2025-12-05 11:27:00.772538579 +0000 UTC m=+1154.914661272" Dec 05 11:27:00 crc kubenswrapper[4728]: I1205 11:27:00.774093 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-7jg64" podStartSLOduration=7.43414569 podStartE2EDuration="33.774087951s" podCreationTimestamp="2025-12-05 11:26:27 +0000 UTC" firstStartedPulling="2025-12-05 11:26:29.151599941 +0000 UTC m=+1123.293722634" lastFinishedPulling="2025-12-05 11:26:55.491542182 +0000 UTC m=+1149.633664895" observedRunningTime="2025-12-05 11:27:00.706477898 +0000 UTC m=+1154.848600621" watchObservedRunningTime="2025-12-05 11:27:00.774087951 +0000 UTC m=+1154.916210634" Dec 05 11:27:00 crc kubenswrapper[4728]: I1205 11:27:00.775244 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-sdqtl" event={"ID":"98cd0df5-f4a5-4515-80b5-d0ac625a527a","Type":"ContainerStarted","Data":"1480cc33e4c95cd58a824922cec4435744cebace7919f5b65bc6fab558a929c8"} Dec 05 11:27:00 crc kubenswrapper[4728]: I1205 11:27:00.775960 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-sdqtl" Dec 05 11:27:00 crc kubenswrapper[4728]: I1205 11:27:00.783040 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-sdqtl" Dec 05 11:27:00 crc kubenswrapper[4728]: I1205 11:27:00.792387 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-8fvcf" event={"ID":"45a6cdaf-b12d-4f86-b6ce-7761cfd5aee6","Type":"ContainerStarted","Data":"dfbb62c3abcc6ed81320ec69525c62c2c3b58fe123424b357bc5908f0c567bd8"} Dec 05 11:27:00 crc kubenswrapper[4728]: I1205 11:27:00.817969 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-tc4ks" event={"ID":"a5b101e4-a4f7-4c73-8327-e09cce07eb51","Type":"ContainerStarted","Data":"4f5134e08a99a2bdb8dbf30ea2c890884836133bc23a872f542cf3f982359e40"} Dec 05 11:27:00 crc kubenswrapper[4728]: I1205 11:27:00.845075 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-sq989" event={"ID":"5d7b3b1e-b8a3-4477-8d37-cfd6db1cf27d","Type":"ContainerStarted","Data":"acbfb9319541a3a6466cccdc5f15834f1cbe79ff832febe083b462f3404d72c0"} Dec 05 11:27:00 crc kubenswrapper[4728]: I1205 11:27:00.865219 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-nch5j" podStartSLOduration=4.137595973 podStartE2EDuration="33.865199787s" podCreationTimestamp="2025-12-05 11:26:27 +0000 UTC" firstStartedPulling="2025-12-05 11:26:29.262543692 +0000 UTC m=+1123.404666385" lastFinishedPulling="2025-12-05 11:26:58.990147506 +0000 UTC m=+1153.132270199" observedRunningTime="2025-12-05 11:27:00.807210684 +0000 UTC m=+1154.949333397" watchObservedRunningTime="2025-12-05 11:27:00.865199787 +0000 UTC m=+1155.007322480" Dec 05 11:27:00 crc kubenswrapper[4728]: I1205 11:27:00.875741 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-sdqtl" podStartSLOduration=6.887655178 podStartE2EDuration="32.875722311s" 
podCreationTimestamp="2025-12-05 11:26:28 +0000 UTC" firstStartedPulling="2025-12-05 11:26:29.490970231 +0000 UTC m=+1123.633092934" lastFinishedPulling="2025-12-05 11:26:55.479037364 +0000 UTC m=+1149.621160067" observedRunningTime="2025-12-05 11:27:00.872738451 +0000 UTC m=+1155.014861144" watchObservedRunningTime="2025-12-05 11:27:00.875722311 +0000 UTC m=+1155.017845004" Dec 05 11:27:00 crc kubenswrapper[4728]: I1205 11:27:00.894520 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-btnkh" event={"ID":"5d689dc0-c7c8-4af2-8f4c-45863ab88b69","Type":"ContainerStarted","Data":"e29e8364b1019df8ea6465a1361c618afa26c544c78555cb4112c1b73cd9bf94"} Dec 05 11:27:00 crc kubenswrapper[4728]: I1205 11:27:00.895519 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-btnkh" Dec 05 11:27:00 crc kubenswrapper[4728]: I1205 11:27:00.912234 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-btnkh" Dec 05 11:27:00 crc kubenswrapper[4728]: I1205 11:27:00.950055 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-xjwhb" event={"ID":"ce9ea80b-ff4e-49a8-a1b6-7f7ea21dbf10","Type":"ContainerStarted","Data":"de3aa0012885be94465e6fd3dd93c9fcb95deefaf0df0d8648db88d604480b0d"} Dec 05 11:27:00 crc kubenswrapper[4728]: I1205 11:27:00.951261 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-xjwhb" Dec 05 11:27:00 crc kubenswrapper[4728]: I1205 11:27:00.958198 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-xjwhb" Dec 05 11:27:01 crc kubenswrapper[4728]: I1205 11:27:01.066218 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-btnkh" podStartSLOduration=3.935280149 podStartE2EDuration="34.066191336s" podCreationTimestamp="2025-12-05 11:26:27 +0000 UTC" firstStartedPulling="2025-12-05 11:26:28.868083707 +0000 UTC m=+1123.010206400" lastFinishedPulling="2025-12-05 11:26:58.998994894 +0000 UTC m=+1153.141117587" observedRunningTime="2025-12-05 11:27:01.012201291 +0000 UTC m=+1155.154323984" watchObservedRunningTime="2025-12-05 11:27:01.066191336 +0000 UTC m=+1155.208314029" Dec 05 11:27:01 crc kubenswrapper[4728]: I1205 11:27:01.066366 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-xjwhb" podStartSLOduration=8.101236946 podStartE2EDuration="34.066361291s" podCreationTimestamp="2025-12-05 11:26:27 +0000 UTC" firstStartedPulling="2025-12-05 11:26:29.468831994 +0000 UTC m=+1123.610954687" lastFinishedPulling="2025-12-05 11:26:55.433956339 +0000 UTC m=+1149.576079032" observedRunningTime="2025-12-05 11:27:01.061308025 +0000 UTC m=+1155.203430708" watchObservedRunningTime="2025-12-05 11:27:01.066361291 +0000 UTC m=+1155.208483984" Dec 05 11:27:01 crc kubenswrapper[4728]: I1205 11:27:01.959854 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-6wcpq" 
event={"ID":"6f5ec4c9-95e8-43ea-a137-9c781e4f234f","Type":"ContainerStarted","Data":"3bcb2b36d08c8807311985397bd892acc6b634d8c702672271c3fae2dbf27f30"} Dec 05 11:27:01 crc kubenswrapper[4728]: I1205 11:27:01.960413 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-6wcpq" Dec 05 11:27:01 crc kubenswrapper[4728]: I1205 11:27:01.961807 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-8fvcf" event={"ID":"45a6cdaf-b12d-4f86-b6ce-7761cfd5aee6","Type":"ContainerStarted","Data":"889f04fa08121e7c5a3d8de3e089cfd57ac75ac25d3dfba8761ea8a9030a44c6"} Dec 05 11:27:01 crc kubenswrapper[4728]: I1205 11:27:01.962197 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-8fvcf" Dec 05 11:27:01 crc kubenswrapper[4728]: I1205 11:27:01.963639 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-gbdj9" event={"ID":"c16435ec-544a-4d19-8667-925c045ecf61","Type":"ContainerStarted","Data":"f509e5a767bcba721920dfcdeb16639af10a87ea11f462492c71e137821b3a28"} Dec 05 11:27:01 crc kubenswrapper[4728]: I1205 11:27:01.965241 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-dnlfm" event={"ID":"df0f8091-3107-4a49-9672-8332e4c1f8c0","Type":"ContainerStarted","Data":"61bb4400d2df8bb6d3f8fe13bc0c10e3ece97d44b6c9e0d04d88a160cd048d74"} Dec 05 11:27:01 crc kubenswrapper[4728]: I1205 11:27:01.965375 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-dnlfm" Dec 05 11:27:01 crc kubenswrapper[4728]: I1205 11:27:01.967165 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-c2s6r" event={"ID":"a1c012ce-e23c-4235-b2b2-56306e3d4722","Type":"ContainerStarted","Data":"63bd42787494b6d7bfca0162d4cee91dea274786884fb819f2ddbbc33e9b2da5"} Dec 05 11:27:01 crc kubenswrapper[4728]: I1205 11:27:01.967267 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-57548d458d-c2s6r" Dec 05 11:27:01 crc kubenswrapper[4728]: I1205 11:27:01.982770 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-sq989" event={"ID":"5d7b3b1e-b8a3-4477-8d37-cfd6db1cf27d","Type":"ContainerStarted","Data":"49aa45f755ef682704b71115b8147c3416f07aa404919e5f176c9071a80665ec"} Dec 05 11:27:01 crc kubenswrapper[4728]: I1205 11:27:01.983878 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-5854674fcc-sq989" Dec 05 11:27:01 crc kubenswrapper[4728]: I1205 11:27:01.984729 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-6wcpq" podStartSLOduration=3.145195469 podStartE2EDuration="34.98470517s" podCreationTimestamp="2025-12-05 11:26:27 +0000 UTC" firstStartedPulling="2025-12-05 11:26:29.16158404 +0000 UTC m=+1123.303706733" lastFinishedPulling="2025-12-05 11:27:01.001093741 +0000 UTC m=+1155.143216434" observedRunningTime="2025-12-05 11:27:01.982888391 +0000 UTC m=+1156.125011104" watchObservedRunningTime="2025-12-05 
11:27:01.98470517 +0000 UTC m=+1156.126827863" Dec 05 11:27:01 crc kubenswrapper[4728]: I1205 11:27:01.987113 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4wlcvm" event={"ID":"6ee43140-9d2f-42c8-917f-eaa028a8e1b1","Type":"ContainerStarted","Data":"74dfe8bdb9324cf5662df4c33b5ec57a14976df01cb43d24cb09e4a9a2d6c4fb"} Dec 05 11:27:01 crc kubenswrapper[4728]: I1205 11:27:01.987959 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4wlcvm" Dec 05 11:27:01 crc kubenswrapper[4728]: I1205 11:27:01.989618 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-h6czw" event={"ID":"db8744a4-edde-4a54-85e9-05089f650ba0","Type":"ContainerStarted","Data":"4c7b41d5f8d52398dcc570bcd0887ede2c1c91176a2c50e22c5b20b0626cdcaa"} Dec 05 11:27:01 crc kubenswrapper[4728]: I1205 11:27:01.991168 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-h6czw" Dec 05 11:27:01 crc kubenswrapper[4728]: I1205 11:27:01.991625 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-h6czw" Dec 05 11:27:02 crc kubenswrapper[4728]: I1205 11:27:02.002685 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-xnq92" event={"ID":"dd2fed26-0e5c-49e0-ad15-3936a13680e7","Type":"ContainerStarted","Data":"3ea0382a57534b73396b8b32a111be5ea8c82307b139912985cff2ba2d72fe54"} Dec 05 11:27:02 crc kubenswrapper[4728]: I1205 11:27:02.003502 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-78f8948974-xnq92" Dec 05 11:27:02 crc kubenswrapper[4728]: I1205 11:27:02.006008 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-tc4ks" event={"ID":"a5b101e4-a4f7-4c73-8327-e09cce07eb51","Type":"ContainerStarted","Data":"3f503b43b1103f72c8547a97c25a464e3f84ce7b217b6453cdf8aa4b6741ab5d"} Dec 05 11:27:02 crc kubenswrapper[4728]: I1205 11:27:02.006032 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-tc4ks" Dec 05 11:27:02 crc kubenswrapper[4728]: I1205 11:27:02.010857 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-dnlfm" podStartSLOduration=6.631607815 podStartE2EDuration="35.010836095s" podCreationTimestamp="2025-12-05 11:26:27 +0000 UTC" firstStartedPulling="2025-12-05 11:26:29.633809762 +0000 UTC m=+1123.775932445" lastFinishedPulling="2025-12-05 11:26:58.013038032 +0000 UTC m=+1152.155160725" observedRunningTime="2025-12-05 11:27:02.009498369 +0000 UTC m=+1156.151621062" watchObservedRunningTime="2025-12-05 11:27:02.010836095 +0000 UTC m=+1156.152958788" Dec 05 11:27:02 crc kubenswrapper[4728]: I1205 11:27:02.032201 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-57548d458d-c2s6r" podStartSLOduration=29.762055495 podStartE2EDuration="35.03217882s" podCreationTimestamp="2025-12-05 11:26:27 +0000 UTC" firstStartedPulling="2025-12-05 
11:26:53.76897195 +0000 UTC m=+1147.911094643" lastFinishedPulling="2025-12-05 11:26:59.039095275 +0000 UTC m=+1153.181217968" observedRunningTime="2025-12-05 11:27:02.027750771 +0000 UTC m=+1156.169873464" watchObservedRunningTime="2025-12-05 11:27:02.03217882 +0000 UTC m=+1156.174301523" Dec 05 11:27:02 crc kubenswrapper[4728]: I1205 11:27:02.062547 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-8fvcf" podStartSLOduration=2.374727417 podStartE2EDuration="34.062523498s" podCreationTimestamp="2025-12-05 11:26:28 +0000 UTC" firstStartedPulling="2025-12-05 11:26:29.614688006 +0000 UTC m=+1123.756810699" lastFinishedPulling="2025-12-05 11:27:01.302484087 +0000 UTC m=+1155.444606780" observedRunningTime="2025-12-05 11:27:02.06074518 +0000 UTC m=+1156.202867893" watchObservedRunningTime="2025-12-05 11:27:02.062523498 +0000 UTC m=+1156.204646211" Dec 05 11:27:02 crc kubenswrapper[4728]: I1205 11:27:02.067447 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-gbdj9" podStartSLOduration=5.6328457069999995 podStartE2EDuration="34.06742934s" podCreationTimestamp="2025-12-05 11:26:28 +0000 UTC" firstStartedPulling="2025-12-05 11:26:29.627524312 +0000 UTC m=+1123.769646995" lastFinishedPulling="2025-12-05 11:26:58.062107935 +0000 UTC m=+1152.204230628" observedRunningTime="2025-12-05 11:27:02.046969299 +0000 UTC m=+1156.189091992" watchObservedRunningTime="2025-12-05 11:27:02.06742934 +0000 UTC m=+1156.209552033" Dec 05 11:27:02 crc kubenswrapper[4728]: I1205 11:27:02.077180 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-5854674fcc-sq989" podStartSLOduration=5.738011673 podStartE2EDuration="34.077132272s" podCreationTimestamp="2025-12-05 11:26:28 +0000 UTC" firstStartedPulling="2025-12-05 11:26:29.748567536 +0000 UTC m=+1123.890690229" lastFinishedPulling="2025-12-05 11:26:58.087688135 +0000 UTC m=+1152.229810828" observedRunningTime="2025-12-05 11:27:02.076095254 +0000 UTC m=+1156.218217967" watchObservedRunningTime="2025-12-05 11:27:02.077132272 +0000 UTC m=+1156.219254965" Dec 05 11:27:02 crc kubenswrapper[4728]: I1205 11:27:02.106944 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-h6czw" podStartSLOduration=4.678272682 podStartE2EDuration="34.106922565s" podCreationTimestamp="2025-12-05 11:26:28 +0000 UTC" firstStartedPulling="2025-12-05 11:26:29.626646139 +0000 UTC m=+1123.768768842" lastFinishedPulling="2025-12-05 11:26:59.055296042 +0000 UTC m=+1153.197418725" observedRunningTime="2025-12-05 11:27:02.103644337 +0000 UTC m=+1156.245767030" watchObservedRunningTime="2025-12-05 11:27:02.106922565 +0000 UTC m=+1156.249045268" Dec 05 11:27:02 crc kubenswrapper[4728]: I1205 11:27:02.127843 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-78f8948974-xnq92" podStartSLOduration=5.750445189 podStartE2EDuration="34.127822579s" podCreationTimestamp="2025-12-05 11:26:28 +0000 UTC" firstStartedPulling="2025-12-05 11:26:29.633846593 +0000 UTC m=+1123.775969286" lastFinishedPulling="2025-12-05 11:26:58.011223983 +0000 UTC m=+1152.153346676" observedRunningTime="2025-12-05 11:27:02.127416638 +0000 UTC m=+1156.269539331" watchObservedRunningTime="2025-12-05 
11:27:02.127822579 +0000 UTC m=+1156.269945272" Dec 05 11:27:02 crc kubenswrapper[4728]: I1205 11:27:02.168172 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4wlcvm" podStartSLOduration=28.911741821 podStartE2EDuration="34.168148796s" podCreationTimestamp="2025-12-05 11:26:28 +0000 UTC" firstStartedPulling="2025-12-05 11:26:53.773491692 +0000 UTC m=+1147.915614385" lastFinishedPulling="2025-12-05 11:26:59.029898667 +0000 UTC m=+1153.172021360" observedRunningTime="2025-12-05 11:27:02.15273288 +0000 UTC m=+1156.294855583" watchObservedRunningTime="2025-12-05 11:27:02.168148796 +0000 UTC m=+1156.310271489" Dec 05 11:27:02 crc kubenswrapper[4728]: I1205 11:27:02.171148 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-tc4ks" podStartSLOduration=4.921658874 podStartE2EDuration="34.171141977s" podCreationTimestamp="2025-12-05 11:26:28 +0000 UTC" firstStartedPulling="2025-12-05 11:26:29.756000386 +0000 UTC m=+1123.898123079" lastFinishedPulling="2025-12-05 11:26:59.005483489 +0000 UTC m=+1153.147606182" observedRunningTime="2025-12-05 11:27:02.167021526 +0000 UTC m=+1156.309144219" watchObservedRunningTime="2025-12-05 11:27:02.171141977 +0000 UTC m=+1156.313264670" Dec 05 11:27:03 crc kubenswrapper[4728]: I1205 11:27:03.019230 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-gbdj9" Dec 05 11:27:04 crc kubenswrapper[4728]: I1205 11:27:04.024215 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-ghbcj" event={"ID":"403718e0-87fa-402a-844e-6b458a15b003","Type":"ContainerStarted","Data":"58f7703f6a4f977b7938b065704643b339aaea25650819f78523c3e72cd821cc"} Dec 05 11:27:04 crc kubenswrapper[4728]: I1205 11:27:04.027176 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-5854674fcc-sq989" Dec 05 11:27:04 crc kubenswrapper[4728]: I1205 11:27:04.027252 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-gbdj9" Dec 05 11:27:04 crc kubenswrapper[4728]: I1205 11:27:04.027838 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-78f8948974-xnq92" Dec 05 11:27:04 crc kubenswrapper[4728]: I1205 11:27:04.052711 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-ghbcj" podStartSLOduration=24.878393408 podStartE2EDuration="37.052692814s" podCreationTimestamp="2025-12-05 11:26:27 +0000 UTC" firstStartedPulling="2025-12-05 11:26:29.294847373 +0000 UTC m=+1123.436970066" lastFinishedPulling="2025-12-05 11:26:41.469146779 +0000 UTC m=+1135.611269472" observedRunningTime="2025-12-05 11:27:04.051376048 +0000 UTC m=+1158.193498761" watchObservedRunningTime="2025-12-05 11:27:04.052692814 +0000 UTC m=+1158.194815527" Dec 05 11:27:04 crc kubenswrapper[4728]: I1205 11:27:04.232554 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-57548d458d-c2s6r" Dec 05 11:27:04 crc kubenswrapper[4728]: I1205 11:27:04.317613 4728 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4wlcvm" Dec 05 11:27:04 crc kubenswrapper[4728]: I1205 11:27:04.868372 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-64b69b8785-cvs4m" Dec 05 11:27:08 crc kubenswrapper[4728]: I1205 11:27:08.371857 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-6wcpq" Dec 05 11:27:08 crc kubenswrapper[4728]: I1205 11:27:08.503915 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-dnlfm" Dec 05 11:27:08 crc kubenswrapper[4728]: I1205 11:27:08.621780 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-8fvcf" Dec 05 11:27:08 crc kubenswrapper[4728]: I1205 11:27:08.907335 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-tc4ks" Dec 05 11:27:24 crc kubenswrapper[4728]: I1205 11:27:24.177260 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-cn5ng"] Dec 05 11:27:24 crc kubenswrapper[4728]: I1205 11:27:24.179313 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-cn5ng" Dec 05 11:27:24 crc kubenswrapper[4728]: I1205 11:27:24.182649 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Dec 05 11:27:24 crc kubenswrapper[4728]: I1205 11:27:24.182941 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-lrmpq" Dec 05 11:27:24 crc kubenswrapper[4728]: I1205 11:27:24.190279 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-cn5ng"] Dec 05 11:27:24 crc kubenswrapper[4728]: I1205 11:27:24.229256 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pptk8\" (UniqueName: \"kubernetes.io/projected/bbc53181-47df-4cf7-9a9d-61c1624f12b3-kube-api-access-pptk8\") pod \"dnsmasq-dns-675f4bcbfc-cn5ng\" (UID: \"bbc53181-47df-4cf7-9a9d-61c1624f12b3\") " pod="openstack/dnsmasq-dns-675f4bcbfc-cn5ng" Dec 05 11:27:24 crc kubenswrapper[4728]: I1205 11:27:24.229363 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bbc53181-47df-4cf7-9a9d-61c1624f12b3-config\") pod \"dnsmasq-dns-675f4bcbfc-cn5ng\" (UID: \"bbc53181-47df-4cf7-9a9d-61c1624f12b3\") " pod="openstack/dnsmasq-dns-675f4bcbfc-cn5ng" Dec 05 11:27:24 crc kubenswrapper[4728]: I1205 11:27:24.269941 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-g5hjp"] Dec 05 11:27:24 crc kubenswrapper[4728]: I1205 11:27:24.272578 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-g5hjp" Dec 05 11:27:24 crc kubenswrapper[4728]: I1205 11:27:24.275412 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Dec 05 11:27:24 crc kubenswrapper[4728]: I1205 11:27:24.286826 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-g5hjp"] Dec 05 11:27:24 crc kubenswrapper[4728]: I1205 11:27:24.332492 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2282724a-a519-4a9f-a95b-4f515866c305-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-g5hjp\" (UID: \"2282724a-a519-4a9f-a95b-4f515866c305\") " pod="openstack/dnsmasq-dns-78dd6ddcc-g5hjp" Dec 05 11:27:24 crc kubenswrapper[4728]: I1205 11:27:24.332709 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bbc53181-47df-4cf7-9a9d-61c1624f12b3-config\") pod \"dnsmasq-dns-675f4bcbfc-cn5ng\" (UID: \"bbc53181-47df-4cf7-9a9d-61c1624f12b3\") " pod="openstack/dnsmasq-dns-675f4bcbfc-cn5ng" Dec 05 11:27:24 crc kubenswrapper[4728]: I1205 11:27:24.332757 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2282724a-a519-4a9f-a95b-4f515866c305-config\") pod \"dnsmasq-dns-78dd6ddcc-g5hjp\" (UID: \"2282724a-a519-4a9f-a95b-4f515866c305\") " pod="openstack/dnsmasq-dns-78dd6ddcc-g5hjp" Dec 05 11:27:24 crc kubenswrapper[4728]: I1205 11:27:24.332823 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x8lq4\" (UniqueName: \"kubernetes.io/projected/2282724a-a519-4a9f-a95b-4f515866c305-kube-api-access-x8lq4\") pod \"dnsmasq-dns-78dd6ddcc-g5hjp\" (UID: \"2282724a-a519-4a9f-a95b-4f515866c305\") " pod="openstack/dnsmasq-dns-78dd6ddcc-g5hjp" Dec 05 11:27:24 crc kubenswrapper[4728]: I1205 11:27:24.332863 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pptk8\" (UniqueName: \"kubernetes.io/projected/bbc53181-47df-4cf7-9a9d-61c1624f12b3-kube-api-access-pptk8\") pod \"dnsmasq-dns-675f4bcbfc-cn5ng\" (UID: \"bbc53181-47df-4cf7-9a9d-61c1624f12b3\") " pod="openstack/dnsmasq-dns-675f4bcbfc-cn5ng" Dec 05 11:27:24 crc kubenswrapper[4728]: I1205 11:27:24.333991 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bbc53181-47df-4cf7-9a9d-61c1624f12b3-config\") pod \"dnsmasq-dns-675f4bcbfc-cn5ng\" (UID: \"bbc53181-47df-4cf7-9a9d-61c1624f12b3\") " pod="openstack/dnsmasq-dns-675f4bcbfc-cn5ng" Dec 05 11:27:24 crc kubenswrapper[4728]: I1205 11:27:24.356489 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pptk8\" (UniqueName: \"kubernetes.io/projected/bbc53181-47df-4cf7-9a9d-61c1624f12b3-kube-api-access-pptk8\") pod \"dnsmasq-dns-675f4bcbfc-cn5ng\" (UID: \"bbc53181-47df-4cf7-9a9d-61c1624f12b3\") " pod="openstack/dnsmasq-dns-675f4bcbfc-cn5ng" Dec 05 11:27:24 crc kubenswrapper[4728]: I1205 11:27:24.434271 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x8lq4\" (UniqueName: \"kubernetes.io/projected/2282724a-a519-4a9f-a95b-4f515866c305-kube-api-access-x8lq4\") pod \"dnsmasq-dns-78dd6ddcc-g5hjp\" (UID: \"2282724a-a519-4a9f-a95b-4f515866c305\") " pod="openstack/dnsmasq-dns-78dd6ddcc-g5hjp" Dec 05 11:27:24 crc 
kubenswrapper[4728]: I1205 11:27:24.434459 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2282724a-a519-4a9f-a95b-4f515866c305-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-g5hjp\" (UID: \"2282724a-a519-4a9f-a95b-4f515866c305\") " pod="openstack/dnsmasq-dns-78dd6ddcc-g5hjp"
Dec 05 11:27:24 crc kubenswrapper[4728]: I1205 11:27:24.434496 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2282724a-a519-4a9f-a95b-4f515866c305-config\") pod \"dnsmasq-dns-78dd6ddcc-g5hjp\" (UID: \"2282724a-a519-4a9f-a95b-4f515866c305\") " pod="openstack/dnsmasq-dns-78dd6ddcc-g5hjp"
Dec 05 11:27:24 crc kubenswrapper[4728]: I1205 11:27:24.435603 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2282724a-a519-4a9f-a95b-4f515866c305-config\") pod \"dnsmasq-dns-78dd6ddcc-g5hjp\" (UID: \"2282724a-a519-4a9f-a95b-4f515866c305\") " pod="openstack/dnsmasq-dns-78dd6ddcc-g5hjp"
Dec 05 11:27:24 crc kubenswrapper[4728]: I1205 11:27:24.435961 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2282724a-a519-4a9f-a95b-4f515866c305-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-g5hjp\" (UID: \"2282724a-a519-4a9f-a95b-4f515866c305\") " pod="openstack/dnsmasq-dns-78dd6ddcc-g5hjp"
Dec 05 11:27:24 crc kubenswrapper[4728]: I1205 11:27:24.452719 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x8lq4\" (UniqueName: \"kubernetes.io/projected/2282724a-a519-4a9f-a95b-4f515866c305-kube-api-access-x8lq4\") pod \"dnsmasq-dns-78dd6ddcc-g5hjp\" (UID: \"2282724a-a519-4a9f-a95b-4f515866c305\") " pod="openstack/dnsmasq-dns-78dd6ddcc-g5hjp"
Dec 05 11:27:24 crc kubenswrapper[4728]: I1205 11:27:24.501268 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-cn5ng"
Dec 05 11:27:24 crc kubenswrapper[4728]: I1205 11:27:24.588596 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-g5hjp"
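For each volume of a new pod, the kubelet's volume reconciler logs the three phases visible above: "operationExecutor.VerifyControllerAttachedVolume started" (reconciler_common.go), "operationExecutor.MountVolume started", and "MountVolume.SetUp succeeded" (operation_generator.go). A minimal sketch that traces one volume through those phases, assuming one record per line and the backslash-escaped quoting shown above; the volume name is an optional argument, with dns-svc (an example from this log) as the default:

import sys

# the three reconciler phases visible in the records above
STAGES = (
    "operationExecutor.VerifyControllerAttachedVolume started",
    "operationExecutor.MountVolume started",
    "MountVolume.SetUp succeeded",
)
# volume names appear backslash-quoted in these records, e.g. volume \"dns-svc\"
needle = 'volume \\"' + (sys.argv[1] if len(sys.argv) > 1 else "dns-svc") + '\\"'

for line in sys.stdin:
    if needle in line:
        for stage in STAGES:
            if stage in line:
                print(line[:15], stage)  # line[:15] is the journald prefix, e.g. "Dec 05 11:27:24"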
Dec 05 11:27:24 crc kubenswrapper[4728]: I1205 11:27:24.926621 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-cn5ng"]
Dec 05 11:27:24 crc kubenswrapper[4728]: W1205 11:27:24.928011 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbbc53181_47df_4cf7_9a9d_61c1624f12b3.slice/crio-2905f801559ae45130f9dc2dc2f4d03e7f67ceeb833c0d1ba1c4da9bb4a534c1 WatchSource:0}: Error finding container 2905f801559ae45130f9dc2dc2f4d03e7f67ceeb833c0d1ba1c4da9bb4a534c1: Status 404 returned error can't find the container with id 2905f801559ae45130f9dc2dc2f4d03e7f67ceeb833c0d1ba1c4da9bb4a534c1
Dec 05 11:27:25 crc kubenswrapper[4728]: I1205 11:27:25.019701 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-g5hjp"]
Dec 05 11:27:25 crc kubenswrapper[4728]: W1205 11:27:25.024011 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2282724a_a519_4a9f_a95b_4f515866c305.slice/crio-1ae34ba56694740ab01d48545c3ae16b5a3df2428de66bc40e94abaa188599a0 WatchSource:0}: Error finding container 1ae34ba56694740ab01d48545c3ae16b5a3df2428de66bc40e94abaa188599a0: Status 404 returned error can't find the container with id 1ae34ba56694740ab01d48545c3ae16b5a3df2428de66bc40e94abaa188599a0
Dec 05 11:27:25 crc kubenswrapper[4728]: I1205 11:27:25.187957 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-cn5ng" event={"ID":"bbc53181-47df-4cf7-9a9d-61c1624f12b3","Type":"ContainerStarted","Data":"2905f801559ae45130f9dc2dc2f4d03e7f67ceeb833c0d1ba1c4da9bb4a534c1"}
Dec 05 11:27:25 crc kubenswrapper[4728]: I1205 11:27:25.193288 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-g5hjp" event={"ID":"2282724a-a519-4a9f-a95b-4f515866c305","Type":"ContainerStarted","Data":"1ae34ba56694740ab01d48545c3ae16b5a3df2428de66bc40e94abaa188599a0"}
Dec 05 11:27:25 crc kubenswrapper[4728]: I1205 11:27:25.702338 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 11:27:25 crc kubenswrapper[4728]: I1205 11:27:25.702408 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 11:27:27 crc kubenswrapper[4728]: I1205 11:27:27.453866 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-cn5ng"]
Dec 05 11:27:27 crc kubenswrapper[4728]: I1205 11:27:27.477453 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-hsbkd"]
Dec 05 11:27:27 crc kubenswrapper[4728]: I1205 11:27:27.479039 4728 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-hsbkd" Dec 05 11:27:27 crc kubenswrapper[4728]: I1205 11:27:27.497784 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-hsbkd"] Dec 05 11:27:27 crc kubenswrapper[4728]: I1205 11:27:27.586005 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6adb9c76-1002-4ec2-a93e-dc614fd45766-dns-svc\") pod \"dnsmasq-dns-666b6646f7-hsbkd\" (UID: \"6adb9c76-1002-4ec2-a93e-dc614fd45766\") " pod="openstack/dnsmasq-dns-666b6646f7-hsbkd" Dec 05 11:27:27 crc kubenswrapper[4728]: I1205 11:27:27.586093 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6adb9c76-1002-4ec2-a93e-dc614fd45766-config\") pod \"dnsmasq-dns-666b6646f7-hsbkd\" (UID: \"6adb9c76-1002-4ec2-a93e-dc614fd45766\") " pod="openstack/dnsmasq-dns-666b6646f7-hsbkd" Dec 05 11:27:27 crc kubenswrapper[4728]: I1205 11:27:27.586135 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kztmh\" (UniqueName: \"kubernetes.io/projected/6adb9c76-1002-4ec2-a93e-dc614fd45766-kube-api-access-kztmh\") pod \"dnsmasq-dns-666b6646f7-hsbkd\" (UID: \"6adb9c76-1002-4ec2-a93e-dc614fd45766\") " pod="openstack/dnsmasq-dns-666b6646f7-hsbkd" Dec 05 11:27:27 crc kubenswrapper[4728]: I1205 11:27:27.687251 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6adb9c76-1002-4ec2-a93e-dc614fd45766-dns-svc\") pod \"dnsmasq-dns-666b6646f7-hsbkd\" (UID: \"6adb9c76-1002-4ec2-a93e-dc614fd45766\") " pod="openstack/dnsmasq-dns-666b6646f7-hsbkd" Dec 05 11:27:27 crc kubenswrapper[4728]: I1205 11:27:27.687315 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6adb9c76-1002-4ec2-a93e-dc614fd45766-config\") pod \"dnsmasq-dns-666b6646f7-hsbkd\" (UID: \"6adb9c76-1002-4ec2-a93e-dc614fd45766\") " pod="openstack/dnsmasq-dns-666b6646f7-hsbkd" Dec 05 11:27:27 crc kubenswrapper[4728]: I1205 11:27:27.687346 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kztmh\" (UniqueName: \"kubernetes.io/projected/6adb9c76-1002-4ec2-a93e-dc614fd45766-kube-api-access-kztmh\") pod \"dnsmasq-dns-666b6646f7-hsbkd\" (UID: \"6adb9c76-1002-4ec2-a93e-dc614fd45766\") " pod="openstack/dnsmasq-dns-666b6646f7-hsbkd" Dec 05 11:27:27 crc kubenswrapper[4728]: I1205 11:27:27.688841 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6adb9c76-1002-4ec2-a93e-dc614fd45766-dns-svc\") pod \"dnsmasq-dns-666b6646f7-hsbkd\" (UID: \"6adb9c76-1002-4ec2-a93e-dc614fd45766\") " pod="openstack/dnsmasq-dns-666b6646f7-hsbkd" Dec 05 11:27:27 crc kubenswrapper[4728]: I1205 11:27:27.689412 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6adb9c76-1002-4ec2-a93e-dc614fd45766-config\") pod \"dnsmasq-dns-666b6646f7-hsbkd\" (UID: \"6adb9c76-1002-4ec2-a93e-dc614fd45766\") " pod="openstack/dnsmasq-dns-666b6646f7-hsbkd" Dec 05 11:27:27 crc kubenswrapper[4728]: I1205 11:27:27.719613 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kztmh\" (UniqueName: 
\"kubernetes.io/projected/6adb9c76-1002-4ec2-a93e-dc614fd45766-kube-api-access-kztmh\") pod \"dnsmasq-dns-666b6646f7-hsbkd\" (UID: \"6adb9c76-1002-4ec2-a93e-dc614fd45766\") " pod="openstack/dnsmasq-dns-666b6646f7-hsbkd" Dec 05 11:27:27 crc kubenswrapper[4728]: I1205 11:27:27.729836 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-g5hjp"] Dec 05 11:27:27 crc kubenswrapper[4728]: I1205 11:27:27.758611 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-mmkxw"] Dec 05 11:27:27 crc kubenswrapper[4728]: I1205 11:27:27.760452 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-mmkxw" Dec 05 11:27:27 crc kubenswrapper[4728]: I1205 11:27:27.770169 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-mmkxw"] Dec 05 11:27:27 crc kubenswrapper[4728]: I1205 11:27:27.803589 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-hsbkd" Dec 05 11:27:27 crc kubenswrapper[4728]: I1205 11:27:27.894925 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ba33afa4-1aa3-408d-ba8c-c61f33aaa067-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-mmkxw\" (UID: \"ba33afa4-1aa3-408d-ba8c-c61f33aaa067\") " pod="openstack/dnsmasq-dns-57d769cc4f-mmkxw" Dec 05 11:27:27 crc kubenswrapper[4728]: I1205 11:27:27.894980 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-84fxc\" (UniqueName: \"kubernetes.io/projected/ba33afa4-1aa3-408d-ba8c-c61f33aaa067-kube-api-access-84fxc\") pod \"dnsmasq-dns-57d769cc4f-mmkxw\" (UID: \"ba33afa4-1aa3-408d-ba8c-c61f33aaa067\") " pod="openstack/dnsmasq-dns-57d769cc4f-mmkxw" Dec 05 11:27:27 crc kubenswrapper[4728]: I1205 11:27:27.895033 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ba33afa4-1aa3-408d-ba8c-c61f33aaa067-config\") pod \"dnsmasq-dns-57d769cc4f-mmkxw\" (UID: \"ba33afa4-1aa3-408d-ba8c-c61f33aaa067\") " pod="openstack/dnsmasq-dns-57d769cc4f-mmkxw" Dec 05 11:27:27 crc kubenswrapper[4728]: I1205 11:27:27.996567 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ba33afa4-1aa3-408d-ba8c-c61f33aaa067-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-mmkxw\" (UID: \"ba33afa4-1aa3-408d-ba8c-c61f33aaa067\") " pod="openstack/dnsmasq-dns-57d769cc4f-mmkxw" Dec 05 11:27:27 crc kubenswrapper[4728]: I1205 11:27:27.996658 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-84fxc\" (UniqueName: \"kubernetes.io/projected/ba33afa4-1aa3-408d-ba8c-c61f33aaa067-kube-api-access-84fxc\") pod \"dnsmasq-dns-57d769cc4f-mmkxw\" (UID: \"ba33afa4-1aa3-408d-ba8c-c61f33aaa067\") " pod="openstack/dnsmasq-dns-57d769cc4f-mmkxw" Dec 05 11:27:27 crc kubenswrapper[4728]: I1205 11:27:27.996721 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ba33afa4-1aa3-408d-ba8c-c61f33aaa067-config\") pod \"dnsmasq-dns-57d769cc4f-mmkxw\" (UID: \"ba33afa4-1aa3-408d-ba8c-c61f33aaa067\") " pod="openstack/dnsmasq-dns-57d769cc4f-mmkxw" Dec 05 11:27:27 crc kubenswrapper[4728]: I1205 11:27:27.998572 4728 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ba33afa4-1aa3-408d-ba8c-c61f33aaa067-config\") pod \"dnsmasq-dns-57d769cc4f-mmkxw\" (UID: \"ba33afa4-1aa3-408d-ba8c-c61f33aaa067\") " pod="openstack/dnsmasq-dns-57d769cc4f-mmkxw" Dec 05 11:27:27 crc kubenswrapper[4728]: I1205 11:27:27.999092 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ba33afa4-1aa3-408d-ba8c-c61f33aaa067-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-mmkxw\" (UID: \"ba33afa4-1aa3-408d-ba8c-c61f33aaa067\") " pod="openstack/dnsmasq-dns-57d769cc4f-mmkxw" Dec 05 11:27:28 crc kubenswrapper[4728]: I1205 11:27:28.026844 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-84fxc\" (UniqueName: \"kubernetes.io/projected/ba33afa4-1aa3-408d-ba8c-c61f33aaa067-kube-api-access-84fxc\") pod \"dnsmasq-dns-57d769cc4f-mmkxw\" (UID: \"ba33afa4-1aa3-408d-ba8c-c61f33aaa067\") " pod="openstack/dnsmasq-dns-57d769cc4f-mmkxw" Dec 05 11:27:28 crc kubenswrapper[4728]: I1205 11:27:28.080947 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-mmkxw" Dec 05 11:27:28 crc kubenswrapper[4728]: I1205 11:27:28.253042 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-hsbkd"] Dec 05 11:27:28 crc kubenswrapper[4728]: W1205 11:27:28.264478 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6adb9c76_1002_4ec2_a93e_dc614fd45766.slice/crio-9dbbb16040e6161dcc6c9b1ed6850069da7c58d65db4d472571501f22fe3de24 WatchSource:0}: Error finding container 9dbbb16040e6161dcc6c9b1ed6850069da7c58d65db4d472571501f22fe3de24: Status 404 returned error can't find the container with id 9dbbb16040e6161dcc6c9b1ed6850069da7c58d65db4d472571501f22fe3de24 Dec 05 11:27:28 crc kubenswrapper[4728]: I1205 11:27:28.518990 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-mmkxw"] Dec 05 11:27:28 crc kubenswrapper[4728]: I1205 11:27:28.715007 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 11:27:28 crc kubenswrapper[4728]: I1205 11:27:28.716899 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 11:27:28 crc kubenswrapper[4728]: I1205 11:27:28.720613 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 05 11:27:28 crc kubenswrapper[4728]: I1205 11:27:28.720638 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-b7kpg" Dec 05 11:27:28 crc kubenswrapper[4728]: I1205 11:27:28.720719 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 05 11:27:28 crc kubenswrapper[4728]: I1205 11:27:28.720840 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 05 11:27:28 crc kubenswrapper[4728]: I1205 11:27:28.720952 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 05 11:27:28 crc kubenswrapper[4728]: I1205 11:27:28.721134 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 05 11:27:28 crc kubenswrapper[4728]: I1205 11:27:28.721984 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 05 11:27:28 crc kubenswrapper[4728]: I1205 11:27:28.729034 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 11:27:28 crc kubenswrapper[4728]: I1205 11:27:28.809116 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4b375e4c-1072-4e6b-be7b-4eea43abf413-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"4b375e4c-1072-4e6b-be7b-4eea43abf413\") " pod="openstack/rabbitmq-server-0" Dec 05 11:27:28 crc kubenswrapper[4728]: I1205 11:27:28.809168 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5spwk\" (UniqueName: \"kubernetes.io/projected/4b375e4c-1072-4e6b-be7b-4eea43abf413-kube-api-access-5spwk\") pod \"rabbitmq-server-0\" (UID: \"4b375e4c-1072-4e6b-be7b-4eea43abf413\") " pod="openstack/rabbitmq-server-0" Dec 05 11:27:28 crc kubenswrapper[4728]: I1205 11:27:28.809331 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4b375e4c-1072-4e6b-be7b-4eea43abf413-server-conf\") pod \"rabbitmq-server-0\" (UID: \"4b375e4c-1072-4e6b-be7b-4eea43abf413\") " pod="openstack/rabbitmq-server-0" Dec 05 11:27:28 crc kubenswrapper[4728]: I1205 11:27:28.809416 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-server-0\" (UID: \"4b375e4c-1072-4e6b-be7b-4eea43abf413\") " pod="openstack/rabbitmq-server-0" Dec 05 11:27:28 crc kubenswrapper[4728]: I1205 11:27:28.809443 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/4b375e4c-1072-4e6b-be7b-4eea43abf413-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"4b375e4c-1072-4e6b-be7b-4eea43abf413\") " pod="openstack/rabbitmq-server-0" Dec 05 11:27:28 crc kubenswrapper[4728]: I1205 11:27:28.809494 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/4b375e4c-1072-4e6b-be7b-4eea43abf413-config-data\") pod \"rabbitmq-server-0\" (UID: \"4b375e4c-1072-4e6b-be7b-4eea43abf413\") " pod="openstack/rabbitmq-server-0" Dec 05 11:27:28 crc kubenswrapper[4728]: I1205 11:27:28.809510 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4b375e4c-1072-4e6b-be7b-4eea43abf413-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"4b375e4c-1072-4e6b-be7b-4eea43abf413\") " pod="openstack/rabbitmq-server-0" Dec 05 11:27:28 crc kubenswrapper[4728]: I1205 11:27:28.809614 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4b375e4c-1072-4e6b-be7b-4eea43abf413-pod-info\") pod \"rabbitmq-server-0\" (UID: \"4b375e4c-1072-4e6b-be7b-4eea43abf413\") " pod="openstack/rabbitmq-server-0" Dec 05 11:27:28 crc kubenswrapper[4728]: I1205 11:27:28.809665 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4b375e4c-1072-4e6b-be7b-4eea43abf413-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"4b375e4c-1072-4e6b-be7b-4eea43abf413\") " pod="openstack/rabbitmq-server-0" Dec 05 11:27:28 crc kubenswrapper[4728]: I1205 11:27:28.809711 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4b375e4c-1072-4e6b-be7b-4eea43abf413-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"4b375e4c-1072-4e6b-be7b-4eea43abf413\") " pod="openstack/rabbitmq-server-0" Dec 05 11:27:28 crc kubenswrapper[4728]: I1205 11:27:28.809749 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4b375e4c-1072-4e6b-be7b-4eea43abf413-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"4b375e4c-1072-4e6b-be7b-4eea43abf413\") " pod="openstack/rabbitmq-server-0" Dec 05 11:27:28 crc kubenswrapper[4728]: I1205 11:27:28.910735 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4b375e4c-1072-4e6b-be7b-4eea43abf413-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"4b375e4c-1072-4e6b-be7b-4eea43abf413\") " pod="openstack/rabbitmq-server-0" Dec 05 11:27:28 crc kubenswrapper[4728]: I1205 11:27:28.910774 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5spwk\" (UniqueName: \"kubernetes.io/projected/4b375e4c-1072-4e6b-be7b-4eea43abf413-kube-api-access-5spwk\") pod \"rabbitmq-server-0\" (UID: \"4b375e4c-1072-4e6b-be7b-4eea43abf413\") " pod="openstack/rabbitmq-server-0" Dec 05 11:27:28 crc kubenswrapper[4728]: I1205 11:27:28.910832 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4b375e4c-1072-4e6b-be7b-4eea43abf413-server-conf\") pod \"rabbitmq-server-0\" (UID: \"4b375e4c-1072-4e6b-be7b-4eea43abf413\") " pod="openstack/rabbitmq-server-0" Dec 05 11:27:28 crc kubenswrapper[4728]: I1205 11:27:28.910876 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-server-0\" (UID: \"4b375e4c-1072-4e6b-be7b-4eea43abf413\") " 
pod="openstack/rabbitmq-server-0" Dec 05 11:27:28 crc kubenswrapper[4728]: I1205 11:27:28.910895 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/4b375e4c-1072-4e6b-be7b-4eea43abf413-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"4b375e4c-1072-4e6b-be7b-4eea43abf413\") " pod="openstack/rabbitmq-server-0" Dec 05 11:27:28 crc kubenswrapper[4728]: I1205 11:27:28.910917 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4b375e4c-1072-4e6b-be7b-4eea43abf413-config-data\") pod \"rabbitmq-server-0\" (UID: \"4b375e4c-1072-4e6b-be7b-4eea43abf413\") " pod="openstack/rabbitmq-server-0" Dec 05 11:27:28 crc kubenswrapper[4728]: I1205 11:27:28.910933 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4b375e4c-1072-4e6b-be7b-4eea43abf413-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"4b375e4c-1072-4e6b-be7b-4eea43abf413\") " pod="openstack/rabbitmq-server-0" Dec 05 11:27:28 crc kubenswrapper[4728]: I1205 11:27:28.910949 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4b375e4c-1072-4e6b-be7b-4eea43abf413-pod-info\") pod \"rabbitmq-server-0\" (UID: \"4b375e4c-1072-4e6b-be7b-4eea43abf413\") " pod="openstack/rabbitmq-server-0" Dec 05 11:27:28 crc kubenswrapper[4728]: I1205 11:27:28.910966 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4b375e4c-1072-4e6b-be7b-4eea43abf413-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"4b375e4c-1072-4e6b-be7b-4eea43abf413\") " pod="openstack/rabbitmq-server-0" Dec 05 11:27:28 crc kubenswrapper[4728]: I1205 11:27:28.910988 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4b375e4c-1072-4e6b-be7b-4eea43abf413-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"4b375e4c-1072-4e6b-be7b-4eea43abf413\") " pod="openstack/rabbitmq-server-0" Dec 05 11:27:28 crc kubenswrapper[4728]: I1205 11:27:28.911015 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4b375e4c-1072-4e6b-be7b-4eea43abf413-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"4b375e4c-1072-4e6b-be7b-4eea43abf413\") " pod="openstack/rabbitmq-server-0" Dec 05 11:27:28 crc kubenswrapper[4728]: I1205 11:27:28.911507 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4b375e4c-1072-4e6b-be7b-4eea43abf413-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"4b375e4c-1072-4e6b-be7b-4eea43abf413\") " pod="openstack/rabbitmq-server-0" Dec 05 11:27:28 crc kubenswrapper[4728]: I1205 11:27:28.911735 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4b375e4c-1072-4e6b-be7b-4eea43abf413-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"4b375e4c-1072-4e6b-be7b-4eea43abf413\") " pod="openstack/rabbitmq-server-0" Dec 05 11:27:28 crc kubenswrapper[4728]: I1205 11:27:28.912337 4728 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-server-0\" (UID: \"4b375e4c-1072-4e6b-be7b-4eea43abf413\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/rabbitmq-server-0" Dec 05 11:27:28 crc kubenswrapper[4728]: I1205 11:27:28.913387 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4b375e4c-1072-4e6b-be7b-4eea43abf413-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"4b375e4c-1072-4e6b-be7b-4eea43abf413\") " pod="openstack/rabbitmq-server-0" Dec 05 11:27:28 crc kubenswrapper[4728]: I1205 11:27:28.913567 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4b375e4c-1072-4e6b-be7b-4eea43abf413-server-conf\") pod \"rabbitmq-server-0\" (UID: \"4b375e4c-1072-4e6b-be7b-4eea43abf413\") " pod="openstack/rabbitmq-server-0" Dec 05 11:27:28 crc kubenswrapper[4728]: I1205 11:27:28.922155 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4b375e4c-1072-4e6b-be7b-4eea43abf413-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"4b375e4c-1072-4e6b-be7b-4eea43abf413\") " pod="openstack/rabbitmq-server-0" Dec 05 11:27:28 crc kubenswrapper[4728]: I1205 11:27:28.922374 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/4b375e4c-1072-4e6b-be7b-4eea43abf413-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"4b375e4c-1072-4e6b-be7b-4eea43abf413\") " pod="openstack/rabbitmq-server-0" Dec 05 11:27:28 crc kubenswrapper[4728]: I1205 11:27:28.923292 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4b375e4c-1072-4e6b-be7b-4eea43abf413-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"4b375e4c-1072-4e6b-be7b-4eea43abf413\") " pod="openstack/rabbitmq-server-0" Dec 05 11:27:28 crc kubenswrapper[4728]: I1205 11:27:28.926641 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4b375e4c-1072-4e6b-be7b-4eea43abf413-config-data\") pod \"rabbitmq-server-0\" (UID: \"4b375e4c-1072-4e6b-be7b-4eea43abf413\") " pod="openstack/rabbitmq-server-0" Dec 05 11:27:28 crc kubenswrapper[4728]: I1205 11:27:28.928252 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4b375e4c-1072-4e6b-be7b-4eea43abf413-pod-info\") pod \"rabbitmq-server-0\" (UID: \"4b375e4c-1072-4e6b-be7b-4eea43abf413\") " pod="openstack/rabbitmq-server-0" Dec 05 11:27:28 crc kubenswrapper[4728]: I1205 11:27:28.931028 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5spwk\" (UniqueName: \"kubernetes.io/projected/4b375e4c-1072-4e6b-be7b-4eea43abf413-kube-api-access-5spwk\") pod \"rabbitmq-server-0\" (UID: \"4b375e4c-1072-4e6b-be7b-4eea43abf413\") " pod="openstack/rabbitmq-server-0" Dec 05 11:27:28 crc kubenswrapper[4728]: I1205 11:27:28.949693 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-server-0\" (UID: \"4b375e4c-1072-4e6b-be7b-4eea43abf413\") " pod="openstack/rabbitmq-server-0" Dec 05 11:27:28 crc kubenswrapper[4728]: I1205 11:27:28.963529 4728 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 11:27:28 crc kubenswrapper[4728]: I1205 11:27:28.965637 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:27:28 crc kubenswrapper[4728]: I1205 11:27:28.968726 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 05 11:27:28 crc kubenswrapper[4728]: I1205 11:27:28.969127 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 05 11:27:28 crc kubenswrapper[4728]: I1205 11:27:28.969848 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 11:27:28 crc kubenswrapper[4728]: I1205 11:27:28.968806 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 05 11:27:28 crc kubenswrapper[4728]: I1205 11:27:28.972113 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-dgpzv" Dec 05 11:27:28 crc kubenswrapper[4728]: I1205 11:27:28.972256 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 05 11:27:28 crc kubenswrapper[4728]: I1205 11:27:28.972369 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 05 11:27:28 crc kubenswrapper[4728]: I1205 11:27:28.978092 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 05 11:27:29 crc kubenswrapper[4728]: I1205 11:27:29.013237 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ffe010f-366a-454a-a8a3-639ed1cf0fdc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:27:29 crc kubenswrapper[4728]: I1205 11:27:29.013288 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ffe010f-366a-454a-a8a3-639ed1cf0fdc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:27:29 crc kubenswrapper[4728]: I1205 11:27:29.013317 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ffe010f-366a-454a-a8a3-639ed1cf0fdc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:27:29 crc kubenswrapper[4728]: I1205 11:27:29.013348 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ffe010f-366a-454a-a8a3-639ed1cf0fdc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:27:29 crc kubenswrapper[4728]: I1205 11:27:29.013374 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-htbpw\" (UniqueName: \"kubernetes.io/projected/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-kube-api-access-htbpw\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"1ffe010f-366a-454a-a8a3-639ed1cf0fdc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:27:29 crc kubenswrapper[4728]: I1205 11:27:29.013413 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ffe010f-366a-454a-a8a3-639ed1cf0fdc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:27:29 crc kubenswrapper[4728]: I1205 11:27:29.013431 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ffe010f-366a-454a-a8a3-639ed1cf0fdc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:27:29 crc kubenswrapper[4728]: I1205 11:27:29.013453 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ffe010f-366a-454a-a8a3-639ed1cf0fdc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:27:29 crc kubenswrapper[4728]: I1205 11:27:29.013481 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ffe010f-366a-454a-a8a3-639ed1cf0fdc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:27:29 crc kubenswrapper[4728]: I1205 11:27:29.013501 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ffe010f-366a-454a-a8a3-639ed1cf0fdc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:27:29 crc kubenswrapper[4728]: I1205 11:27:29.013518 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ffe010f-366a-454a-a8a3-639ed1cf0fdc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:27:29 crc kubenswrapper[4728]: I1205 11:27:29.070893 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 11:27:29 crc kubenswrapper[4728]: I1205 11:27:29.115073 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-htbpw\" (UniqueName: \"kubernetes.io/projected/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-kube-api-access-htbpw\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ffe010f-366a-454a-a8a3-639ed1cf0fdc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:27:29 crc kubenswrapper[4728]: I1205 11:27:29.115180 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ffe010f-366a-454a-a8a3-639ed1cf0fdc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:27:29 crc kubenswrapper[4728]: I1205 11:27:29.115208 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ffe010f-366a-454a-a8a3-639ed1cf0fdc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:27:29 crc kubenswrapper[4728]: I1205 11:27:29.115258 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ffe010f-366a-454a-a8a3-639ed1cf0fdc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:27:29 crc kubenswrapper[4728]: I1205 11:27:29.115371 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ffe010f-366a-454a-a8a3-639ed1cf0fdc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:27:29 crc kubenswrapper[4728]: I1205 11:27:29.116727 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ffe010f-366a-454a-a8a3-639ed1cf0fdc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:27:29 crc kubenswrapper[4728]: I1205 11:27:29.117277 4728 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ffe010f-366a-454a-a8a3-639ed1cf0fdc\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:27:29 crc kubenswrapper[4728]: I1205 11:27:29.117820 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ffe010f-366a-454a-a8a3-639ed1cf0fdc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:27:29 crc kubenswrapper[4728]: I1205 11:27:29.120262 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ffe010f-366a-454a-a8a3-639ed1cf0fdc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:27:29 crc kubenswrapper[4728]: I1205 11:27:29.120577 4728 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ffe010f-366a-454a-a8a3-639ed1cf0fdc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:27:29 crc kubenswrapper[4728]: I1205 11:27:29.120713 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ffe010f-366a-454a-a8a3-639ed1cf0fdc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:27:29 crc kubenswrapper[4728]: I1205 11:27:29.120746 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ffe010f-366a-454a-a8a3-639ed1cf0fdc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:27:29 crc kubenswrapper[4728]: I1205 11:27:29.120859 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ffe010f-366a-454a-a8a3-639ed1cf0fdc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:27:29 crc kubenswrapper[4728]: I1205 11:27:29.120934 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ffe010f-366a-454a-a8a3-639ed1cf0fdc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:27:29 crc kubenswrapper[4728]: I1205 11:27:29.121011 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ffe010f-366a-454a-a8a3-639ed1cf0fdc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:27:29 crc kubenswrapper[4728]: I1205 11:27:29.121459 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ffe010f-366a-454a-a8a3-639ed1cf0fdc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:27:29 crc kubenswrapper[4728]: I1205 11:27:29.122054 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ffe010f-366a-454a-a8a3-639ed1cf0fdc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:27:29 crc kubenswrapper[4728]: I1205 11:27:29.126751 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ffe010f-366a-454a-a8a3-639ed1cf0fdc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:27:29 crc kubenswrapper[4728]: I1205 11:27:29.127047 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-rabbitmq-plugins\") 
pod \"rabbitmq-cell1-server-0\" (UID: \"1ffe010f-366a-454a-a8a3-639ed1cf0fdc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:27:29 crc kubenswrapper[4728]: I1205 11:27:29.127102 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ffe010f-366a-454a-a8a3-639ed1cf0fdc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:27:29 crc kubenswrapper[4728]: I1205 11:27:29.160173 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-htbpw\" (UniqueName: \"kubernetes.io/projected/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-kube-api-access-htbpw\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ffe010f-366a-454a-a8a3-639ed1cf0fdc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:27:29 crc kubenswrapper[4728]: I1205 11:27:29.163314 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ffe010f-366a-454a-a8a3-639ed1cf0fdc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:27:29 crc kubenswrapper[4728]: I1205 11:27:29.190884 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ffe010f-366a-454a-a8a3-639ed1cf0fdc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:27:29 crc kubenswrapper[4728]: I1205 11:27:29.258035 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-hsbkd" event={"ID":"6adb9c76-1002-4ec2-a93e-dc614fd45766","Type":"ContainerStarted","Data":"9dbbb16040e6161dcc6c9b1ed6850069da7c58d65db4d472571501f22fe3de24"} Dec 05 11:27:29 crc kubenswrapper[4728]: I1205 11:27:29.265959 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-mmkxw" event={"ID":"ba33afa4-1aa3-408d-ba8c-c61f33aaa067","Type":"ContainerStarted","Data":"37bebf2fcde2c6ecaa3ad2fb3eed89e996f8479e2f3318bd0d944fe8886da1b7"} Dec 05 11:27:29 crc kubenswrapper[4728]: I1205 11:27:29.341239 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:27:29 crc kubenswrapper[4728]: I1205 11:27:29.736531 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 11:27:29 crc kubenswrapper[4728]: I1205 11:27:29.843099 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 11:27:29 crc kubenswrapper[4728]: W1205 11:27:29.860118 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1ffe010f_366a_454a_a8a3_639ed1cf0fdc.slice/crio-d9248dc63dfb7a8ae1da0eb7e96dd48756327c14299c126c63e96cf07952e471 WatchSource:0}: Error finding container d9248dc63dfb7a8ae1da0eb7e96dd48756327c14299c126c63e96cf07952e471: Status 404 returned error can't find the container with id d9248dc63dfb7a8ae1da0eb7e96dd48756327c14299c126c63e96cf07952e471 Dec 05 11:27:30 crc kubenswrapper[4728]: I1205 11:27:30.260418 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Dec 05 11:27:30 crc kubenswrapper[4728]: I1205 11:27:30.261772 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Dec 05 11:27:30 crc kubenswrapper[4728]: I1205 11:27:30.273995 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Dec 05 11:27:30 crc kubenswrapper[4728]: I1205 11:27:30.274224 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Dec 05 11:27:30 crc kubenswrapper[4728]: I1205 11:27:30.274558 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Dec 05 11:27:30 crc kubenswrapper[4728]: I1205 11:27:30.274947 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-z76pl" Dec 05 11:27:30 crc kubenswrapper[4728]: I1205 11:27:30.278562 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Dec 05 11:27:30 crc kubenswrapper[4728]: I1205 11:27:30.292127 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 05 11:27:30 crc kubenswrapper[4728]: I1205 11:27:30.316592 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"4b375e4c-1072-4e6b-be7b-4eea43abf413","Type":"ContainerStarted","Data":"f8e2896bb32c40a66c0616539bdd6169fe38a2e356dd764500d48f60b601e90b"} Dec 05 11:27:30 crc kubenswrapper[4728]: I1205 11:27:30.319541 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"1ffe010f-366a-454a-a8a3-639ed1cf0fdc","Type":"ContainerStarted","Data":"d9248dc63dfb7a8ae1da0eb7e96dd48756327c14299c126c63e96cf07952e471"} Dec 05 11:27:30 crc kubenswrapper[4728]: I1205 11:27:30.347134 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/08dfc6f9-aba5-4869-bdd3-7e3e33754318-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"08dfc6f9-aba5-4869-bdd3-7e3e33754318\") " pod="openstack/openstack-galera-0" Dec 05 11:27:30 crc kubenswrapper[4728]: I1205 11:27:30.347175 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/08dfc6f9-aba5-4869-bdd3-7e3e33754318-kolla-config\") pod \"openstack-galera-0\" (UID: \"08dfc6f9-aba5-4869-bdd3-7e3e33754318\") " pod="openstack/openstack-galera-0" Dec 05 11:27:30 crc kubenswrapper[4728]: I1205 11:27:30.347219 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/08dfc6f9-aba5-4869-bdd3-7e3e33754318-config-data-default\") pod \"openstack-galera-0\" (UID: \"08dfc6f9-aba5-4869-bdd3-7e3e33754318\") " pod="openstack/openstack-galera-0" Dec 05 11:27:30 crc kubenswrapper[4728]: I1205 11:27:30.347287 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08dfc6f9-aba5-4869-bdd3-7e3e33754318-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"08dfc6f9-aba5-4869-bdd3-7e3e33754318\") " pod="openstack/openstack-galera-0" Dec 05 11:27:30 crc kubenswrapper[4728]: I1205 11:27:30.347319 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jmmrq\" (UniqueName: \"kubernetes.io/projected/08dfc6f9-aba5-4869-bdd3-7e3e33754318-kube-api-access-jmmrq\") pod \"openstack-galera-0\" 
(UID: \"08dfc6f9-aba5-4869-bdd3-7e3e33754318\") " pod="openstack/openstack-galera-0" Dec 05 11:27:30 crc kubenswrapper[4728]: I1205 11:27:30.347344 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/08dfc6f9-aba5-4869-bdd3-7e3e33754318-config-data-generated\") pod \"openstack-galera-0\" (UID: \"08dfc6f9-aba5-4869-bdd3-7e3e33754318\") " pod="openstack/openstack-galera-0" Dec 05 11:27:30 crc kubenswrapper[4728]: I1205 11:27:30.347363 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"openstack-galera-0\" (UID: \"08dfc6f9-aba5-4869-bdd3-7e3e33754318\") " pod="openstack/openstack-galera-0" Dec 05 11:27:30 crc kubenswrapper[4728]: I1205 11:27:30.347386 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/08dfc6f9-aba5-4869-bdd3-7e3e33754318-operator-scripts\") pod \"openstack-galera-0\" (UID: \"08dfc6f9-aba5-4869-bdd3-7e3e33754318\") " pod="openstack/openstack-galera-0" Dec 05 11:27:30 crc kubenswrapper[4728]: I1205 11:27:30.455358 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/08dfc6f9-aba5-4869-bdd3-7e3e33754318-config-data-generated\") pod \"openstack-galera-0\" (UID: \"08dfc6f9-aba5-4869-bdd3-7e3e33754318\") " pod="openstack/openstack-galera-0" Dec 05 11:27:30 crc kubenswrapper[4728]: I1205 11:27:30.455420 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"openstack-galera-0\" (UID: \"08dfc6f9-aba5-4869-bdd3-7e3e33754318\") " pod="openstack/openstack-galera-0" Dec 05 11:27:30 crc kubenswrapper[4728]: I1205 11:27:30.455468 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/08dfc6f9-aba5-4869-bdd3-7e3e33754318-operator-scripts\") pod \"openstack-galera-0\" (UID: \"08dfc6f9-aba5-4869-bdd3-7e3e33754318\") " pod="openstack/openstack-galera-0" Dec 05 11:27:30 crc kubenswrapper[4728]: I1205 11:27:30.455553 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/08dfc6f9-aba5-4869-bdd3-7e3e33754318-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"08dfc6f9-aba5-4869-bdd3-7e3e33754318\") " pod="openstack/openstack-galera-0" Dec 05 11:27:30 crc kubenswrapper[4728]: I1205 11:27:30.455573 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/08dfc6f9-aba5-4869-bdd3-7e3e33754318-kolla-config\") pod \"openstack-galera-0\" (UID: \"08dfc6f9-aba5-4869-bdd3-7e3e33754318\") " pod="openstack/openstack-galera-0" Dec 05 11:27:30 crc kubenswrapper[4728]: I1205 11:27:30.455666 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/08dfc6f9-aba5-4869-bdd3-7e3e33754318-config-data-default\") pod \"openstack-galera-0\" (UID: \"08dfc6f9-aba5-4869-bdd3-7e3e33754318\") " pod="openstack/openstack-galera-0" Dec 05 11:27:30 crc kubenswrapper[4728]: I1205 11:27:30.455752 4728 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08dfc6f9-aba5-4869-bdd3-7e3e33754318-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"08dfc6f9-aba5-4869-bdd3-7e3e33754318\") " pod="openstack/openstack-galera-0" Dec 05 11:27:30 crc kubenswrapper[4728]: I1205 11:27:30.455869 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jmmrq\" (UniqueName: \"kubernetes.io/projected/08dfc6f9-aba5-4869-bdd3-7e3e33754318-kube-api-access-jmmrq\") pod \"openstack-galera-0\" (UID: \"08dfc6f9-aba5-4869-bdd3-7e3e33754318\") " pod="openstack/openstack-galera-0" Dec 05 11:27:30 crc kubenswrapper[4728]: I1205 11:27:30.456979 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/08dfc6f9-aba5-4869-bdd3-7e3e33754318-config-data-generated\") pod \"openstack-galera-0\" (UID: \"08dfc6f9-aba5-4869-bdd3-7e3e33754318\") " pod="openstack/openstack-galera-0" Dec 05 11:27:30 crc kubenswrapper[4728]: I1205 11:27:30.457255 4728 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"openstack-galera-0\" (UID: \"08dfc6f9-aba5-4869-bdd3-7e3e33754318\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/openstack-galera-0" Dec 05 11:27:30 crc kubenswrapper[4728]: I1205 11:27:30.460500 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/08dfc6f9-aba5-4869-bdd3-7e3e33754318-kolla-config\") pod \"openstack-galera-0\" (UID: \"08dfc6f9-aba5-4869-bdd3-7e3e33754318\") " pod="openstack/openstack-galera-0" Dec 05 11:27:30 crc kubenswrapper[4728]: I1205 11:27:30.461207 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/08dfc6f9-aba5-4869-bdd3-7e3e33754318-operator-scripts\") pod \"openstack-galera-0\" (UID: \"08dfc6f9-aba5-4869-bdd3-7e3e33754318\") " pod="openstack/openstack-galera-0" Dec 05 11:27:30 crc kubenswrapper[4728]: I1205 11:27:30.464150 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/08dfc6f9-aba5-4869-bdd3-7e3e33754318-config-data-default\") pod \"openstack-galera-0\" (UID: \"08dfc6f9-aba5-4869-bdd3-7e3e33754318\") " pod="openstack/openstack-galera-0" Dec 05 11:27:30 crc kubenswrapper[4728]: I1205 11:27:30.484341 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/08dfc6f9-aba5-4869-bdd3-7e3e33754318-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"08dfc6f9-aba5-4869-bdd3-7e3e33754318\") " pod="openstack/openstack-galera-0" Dec 05 11:27:30 crc kubenswrapper[4728]: I1205 11:27:30.484963 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jmmrq\" (UniqueName: \"kubernetes.io/projected/08dfc6f9-aba5-4869-bdd3-7e3e33754318-kube-api-access-jmmrq\") pod \"openstack-galera-0\" (UID: \"08dfc6f9-aba5-4869-bdd3-7e3e33754318\") " pod="openstack/openstack-galera-0" Dec 05 11:27:30 crc kubenswrapper[4728]: I1205 11:27:30.489109 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08dfc6f9-aba5-4869-bdd3-7e3e33754318-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: 
\"08dfc6f9-aba5-4869-bdd3-7e3e33754318\") " pod="openstack/openstack-galera-0" Dec 05 11:27:30 crc kubenswrapper[4728]: I1205 11:27:30.500677 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"openstack-galera-0\" (UID: \"08dfc6f9-aba5-4869-bdd3-7e3e33754318\") " pod="openstack/openstack-galera-0" Dec 05 11:27:30 crc kubenswrapper[4728]: I1205 11:27:30.616317 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Dec 05 11:27:31 crc kubenswrapper[4728]: I1205 11:27:31.208184 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 05 11:27:31 crc kubenswrapper[4728]: I1205 11:27:31.339299 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"08dfc6f9-aba5-4869-bdd3-7e3e33754318","Type":"ContainerStarted","Data":"e10b4062a5cb3ceb2560e49eb85debbc795b8b5fbcb508204cb92239d77a3db4"} Dec 05 11:27:31 crc kubenswrapper[4728]: I1205 11:27:31.592510 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 05 11:27:31 crc kubenswrapper[4728]: I1205 11:27:31.593983 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 05 11:27:31 crc kubenswrapper[4728]: I1205 11:27:31.597367 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-pmxmv" Dec 05 11:27:31 crc kubenswrapper[4728]: I1205 11:27:31.598496 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Dec 05 11:27:31 crc kubenswrapper[4728]: I1205 11:27:31.599401 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Dec 05 11:27:31 crc kubenswrapper[4728]: I1205 11:27:31.599684 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Dec 05 11:27:31 crc kubenswrapper[4728]: I1205 11:27:31.602511 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 05 11:27:31 crc kubenswrapper[4728]: I1205 11:27:31.677859 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-scn29\" (UniqueName: \"kubernetes.io/projected/d9cada47-64db-4c9c-8598-917e4099a8a6-kube-api-access-scn29\") pod \"openstack-cell1-galera-0\" (UID: \"d9cada47-64db-4c9c-8598-917e4099a8a6\") " pod="openstack/openstack-cell1-galera-0" Dec 05 11:27:31 crc kubenswrapper[4728]: I1205 11:27:31.678040 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/d9cada47-64db-4c9c-8598-917e4099a8a6-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"d9cada47-64db-4c9c-8598-917e4099a8a6\") " pod="openstack/openstack-cell1-galera-0" Dec 05 11:27:31 crc kubenswrapper[4728]: I1205 11:27:31.678168 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/d9cada47-64db-4c9c-8598-917e4099a8a6-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"d9cada47-64db-4c9c-8598-917e4099a8a6\") " pod="openstack/openstack-cell1-galera-0" Dec 05 11:27:31 crc kubenswrapper[4728]: I1205 11:27:31.678239 4728 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9cada47-64db-4c9c-8598-917e4099a8a6-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"d9cada47-64db-4c9c-8598-917e4099a8a6\") " pod="openstack/openstack-cell1-galera-0" Dec 05 11:27:31 crc kubenswrapper[4728]: I1205 11:27:31.678274 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-cell1-galera-0\" (UID: \"d9cada47-64db-4c9c-8598-917e4099a8a6\") " pod="openstack/openstack-cell1-galera-0" Dec 05 11:27:31 crc kubenswrapper[4728]: I1205 11:27:31.678327 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d9cada47-64db-4c9c-8598-917e4099a8a6-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"d9cada47-64db-4c9c-8598-917e4099a8a6\") " pod="openstack/openstack-cell1-galera-0" Dec 05 11:27:31 crc kubenswrapper[4728]: I1205 11:27:31.678358 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/d9cada47-64db-4c9c-8598-917e4099a8a6-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"d9cada47-64db-4c9c-8598-917e4099a8a6\") " pod="openstack/openstack-cell1-galera-0" Dec 05 11:27:31 crc kubenswrapper[4728]: I1205 11:27:31.678399 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/d9cada47-64db-4c9c-8598-917e4099a8a6-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"d9cada47-64db-4c9c-8598-917e4099a8a6\") " pod="openstack/openstack-cell1-galera-0" Dec 05 11:27:31 crc kubenswrapper[4728]: I1205 11:27:31.779548 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/d9cada47-64db-4c9c-8598-917e4099a8a6-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"d9cada47-64db-4c9c-8598-917e4099a8a6\") " pod="openstack/openstack-cell1-galera-0" Dec 05 11:27:31 crc kubenswrapper[4728]: I1205 11:27:31.779619 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9cada47-64db-4c9c-8598-917e4099a8a6-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"d9cada47-64db-4c9c-8598-917e4099a8a6\") " pod="openstack/openstack-cell1-galera-0" Dec 05 11:27:31 crc kubenswrapper[4728]: I1205 11:27:31.779686 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-cell1-galera-0\" (UID: \"d9cada47-64db-4c9c-8598-917e4099a8a6\") " pod="openstack/openstack-cell1-galera-0" Dec 05 11:27:31 crc kubenswrapper[4728]: I1205 11:27:31.779724 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d9cada47-64db-4c9c-8598-917e4099a8a6-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"d9cada47-64db-4c9c-8598-917e4099a8a6\") " pod="openstack/openstack-cell1-galera-0" Dec 05 11:27:31 crc kubenswrapper[4728]: I1205 11:27:31.779742 4728 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/d9cada47-64db-4c9c-8598-917e4099a8a6-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"d9cada47-64db-4c9c-8598-917e4099a8a6\") " pod="openstack/openstack-cell1-galera-0" Dec 05 11:27:31 crc kubenswrapper[4728]: I1205 11:27:31.779769 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/d9cada47-64db-4c9c-8598-917e4099a8a6-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"d9cada47-64db-4c9c-8598-917e4099a8a6\") " pod="openstack/openstack-cell1-galera-0" Dec 05 11:27:31 crc kubenswrapper[4728]: I1205 11:27:31.779808 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-scn29\" (UniqueName: \"kubernetes.io/projected/d9cada47-64db-4c9c-8598-917e4099a8a6-kube-api-access-scn29\") pod \"openstack-cell1-galera-0\" (UID: \"d9cada47-64db-4c9c-8598-917e4099a8a6\") " pod="openstack/openstack-cell1-galera-0" Dec 05 11:27:31 crc kubenswrapper[4728]: I1205 11:27:31.779860 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/d9cada47-64db-4c9c-8598-917e4099a8a6-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"d9cada47-64db-4c9c-8598-917e4099a8a6\") " pod="openstack/openstack-cell1-galera-0" Dec 05 11:27:31 crc kubenswrapper[4728]: I1205 11:27:31.780362 4728 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-cell1-galera-0\" (UID: \"d9cada47-64db-4c9c-8598-917e4099a8a6\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/openstack-cell1-galera-0" Dec 05 11:27:31 crc kubenswrapper[4728]: I1205 11:27:31.780902 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/d9cada47-64db-4c9c-8598-917e4099a8a6-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"d9cada47-64db-4c9c-8598-917e4099a8a6\") " pod="openstack/openstack-cell1-galera-0" Dec 05 11:27:31 crc kubenswrapper[4728]: I1205 11:27:31.782835 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d9cada47-64db-4c9c-8598-917e4099a8a6-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"d9cada47-64db-4c9c-8598-917e4099a8a6\") " pod="openstack/openstack-cell1-galera-0" Dec 05 11:27:31 crc kubenswrapper[4728]: I1205 11:27:31.783325 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/d9cada47-64db-4c9c-8598-917e4099a8a6-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"d9cada47-64db-4c9c-8598-917e4099a8a6\") " pod="openstack/openstack-cell1-galera-0" Dec 05 11:27:31 crc kubenswrapper[4728]: I1205 11:27:31.783563 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/d9cada47-64db-4c9c-8598-917e4099a8a6-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"d9cada47-64db-4c9c-8598-917e4099a8a6\") " pod="openstack/openstack-cell1-galera-0" Dec 05 11:27:31 crc kubenswrapper[4728]: I1205 11:27:31.792681 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/d9cada47-64db-4c9c-8598-917e4099a8a6-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"d9cada47-64db-4c9c-8598-917e4099a8a6\") " pod="openstack/openstack-cell1-galera-0" Dec 05 11:27:31 crc kubenswrapper[4728]: I1205 11:27:31.792712 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/d9cada47-64db-4c9c-8598-917e4099a8a6-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"d9cada47-64db-4c9c-8598-917e4099a8a6\") " pod="openstack/openstack-cell1-galera-0" Dec 05 11:27:31 crc kubenswrapper[4728]: I1205 11:27:31.810683 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-scn29\" (UniqueName: \"kubernetes.io/projected/d9cada47-64db-4c9c-8598-917e4099a8a6-kube-api-access-scn29\") pod \"openstack-cell1-galera-0\" (UID: \"d9cada47-64db-4c9c-8598-917e4099a8a6\") " pod="openstack/openstack-cell1-galera-0" Dec 05 11:27:31 crc kubenswrapper[4728]: I1205 11:27:31.825453 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-cell1-galera-0\" (UID: \"d9cada47-64db-4c9c-8598-917e4099a8a6\") " pod="openstack/openstack-cell1-galera-0" Dec 05 11:27:31 crc kubenswrapper[4728]: I1205 11:27:31.927880 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 05 11:27:31 crc kubenswrapper[4728]: I1205 11:27:31.930865 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Dec 05 11:27:31 crc kubenswrapper[4728]: I1205 11:27:31.932047 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 05 11:27:31 crc kubenswrapper[4728]: I1205 11:27:31.941858 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-mdmzg" Dec 05 11:27:31 crc kubenswrapper[4728]: I1205 11:27:31.942085 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Dec 05 11:27:31 crc kubenswrapper[4728]: I1205 11:27:31.941858 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Dec 05 11:27:31 crc kubenswrapper[4728]: I1205 11:27:31.946433 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 05 11:27:31 crc kubenswrapper[4728]: I1205 11:27:31.985648 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/efbf5688-8330-4166-a93b-03dcf8ed578d-kolla-config\") pod \"memcached-0\" (UID: \"efbf5688-8330-4166-a93b-03dcf8ed578d\") " pod="openstack/memcached-0" Dec 05 11:27:31 crc kubenswrapper[4728]: I1205 11:27:31.985690 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/efbf5688-8330-4166-a93b-03dcf8ed578d-memcached-tls-certs\") pod \"memcached-0\" (UID: \"efbf5688-8330-4166-a93b-03dcf8ed578d\") " pod="openstack/memcached-0" Dec 05 11:27:31 crc kubenswrapper[4728]: I1205 11:27:31.985756 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/efbf5688-8330-4166-a93b-03dcf8ed578d-config-data\") pod \"memcached-0\" (UID: \"efbf5688-8330-4166-a93b-03dcf8ed578d\") 
" pod="openstack/memcached-0" Dec 05 11:27:31 crc kubenswrapper[4728]: I1205 11:27:31.985812 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hmbkg\" (UniqueName: \"kubernetes.io/projected/efbf5688-8330-4166-a93b-03dcf8ed578d-kube-api-access-hmbkg\") pod \"memcached-0\" (UID: \"efbf5688-8330-4166-a93b-03dcf8ed578d\") " pod="openstack/memcached-0" Dec 05 11:27:31 crc kubenswrapper[4728]: I1205 11:27:31.985842 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/efbf5688-8330-4166-a93b-03dcf8ed578d-combined-ca-bundle\") pod \"memcached-0\" (UID: \"efbf5688-8330-4166-a93b-03dcf8ed578d\") " pod="openstack/memcached-0" Dec 05 11:27:32 crc kubenswrapper[4728]: I1205 11:27:32.091192 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/efbf5688-8330-4166-a93b-03dcf8ed578d-config-data\") pod \"memcached-0\" (UID: \"efbf5688-8330-4166-a93b-03dcf8ed578d\") " pod="openstack/memcached-0" Dec 05 11:27:32 crc kubenswrapper[4728]: I1205 11:27:32.091309 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hmbkg\" (UniqueName: \"kubernetes.io/projected/efbf5688-8330-4166-a93b-03dcf8ed578d-kube-api-access-hmbkg\") pod \"memcached-0\" (UID: \"efbf5688-8330-4166-a93b-03dcf8ed578d\") " pod="openstack/memcached-0" Dec 05 11:27:32 crc kubenswrapper[4728]: I1205 11:27:32.091363 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/efbf5688-8330-4166-a93b-03dcf8ed578d-combined-ca-bundle\") pod \"memcached-0\" (UID: \"efbf5688-8330-4166-a93b-03dcf8ed578d\") " pod="openstack/memcached-0" Dec 05 11:27:32 crc kubenswrapper[4728]: I1205 11:27:32.091433 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/efbf5688-8330-4166-a93b-03dcf8ed578d-kolla-config\") pod \"memcached-0\" (UID: \"efbf5688-8330-4166-a93b-03dcf8ed578d\") " pod="openstack/memcached-0" Dec 05 11:27:32 crc kubenswrapper[4728]: I1205 11:27:32.091453 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/efbf5688-8330-4166-a93b-03dcf8ed578d-memcached-tls-certs\") pod \"memcached-0\" (UID: \"efbf5688-8330-4166-a93b-03dcf8ed578d\") " pod="openstack/memcached-0" Dec 05 11:27:32 crc kubenswrapper[4728]: I1205 11:27:32.092276 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/efbf5688-8330-4166-a93b-03dcf8ed578d-config-data\") pod \"memcached-0\" (UID: \"efbf5688-8330-4166-a93b-03dcf8ed578d\") " pod="openstack/memcached-0" Dec 05 11:27:32 crc kubenswrapper[4728]: I1205 11:27:32.093198 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/efbf5688-8330-4166-a93b-03dcf8ed578d-kolla-config\") pod \"memcached-0\" (UID: \"efbf5688-8330-4166-a93b-03dcf8ed578d\") " pod="openstack/memcached-0" Dec 05 11:27:32 crc kubenswrapper[4728]: I1205 11:27:32.096074 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/efbf5688-8330-4166-a93b-03dcf8ed578d-memcached-tls-certs\") pod \"memcached-0\" (UID: 
\"efbf5688-8330-4166-a93b-03dcf8ed578d\") " pod="openstack/memcached-0" Dec 05 11:27:32 crc kubenswrapper[4728]: I1205 11:27:32.097369 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/efbf5688-8330-4166-a93b-03dcf8ed578d-combined-ca-bundle\") pod \"memcached-0\" (UID: \"efbf5688-8330-4166-a93b-03dcf8ed578d\") " pod="openstack/memcached-0" Dec 05 11:27:32 crc kubenswrapper[4728]: I1205 11:27:32.116812 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hmbkg\" (UniqueName: \"kubernetes.io/projected/efbf5688-8330-4166-a93b-03dcf8ed578d-kube-api-access-hmbkg\") pod \"memcached-0\" (UID: \"efbf5688-8330-4166-a93b-03dcf8ed578d\") " pod="openstack/memcached-0" Dec 05 11:27:32 crc kubenswrapper[4728]: I1205 11:27:32.254266 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 05 11:27:32 crc kubenswrapper[4728]: I1205 11:27:32.570845 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 05 11:27:32 crc kubenswrapper[4728]: W1205 11:27:32.579759 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd9cada47_64db_4c9c_8598_917e4099a8a6.slice/crio-f6888b46cd97816bc19cae1255f650434df3820fe5d67ae39298aea44964ff48 WatchSource:0}: Error finding container f6888b46cd97816bc19cae1255f650434df3820fe5d67ae39298aea44964ff48: Status 404 returned error can't find the container with id f6888b46cd97816bc19cae1255f650434df3820fe5d67ae39298aea44964ff48 Dec 05 11:27:32 crc kubenswrapper[4728]: I1205 11:27:32.583372 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 05 11:27:34 crc kubenswrapper[4728]: I1205 11:27:33.363868 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"efbf5688-8330-4166-a93b-03dcf8ed578d","Type":"ContainerStarted","Data":"3157e08cff6849ea83cac8afa0bbe4df14b8428ff45a0c5e2024daf68d3970a5"} Dec 05 11:27:34 crc kubenswrapper[4728]: I1205 11:27:33.369693 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"d9cada47-64db-4c9c-8598-917e4099a8a6","Type":"ContainerStarted","Data":"f6888b46cd97816bc19cae1255f650434df3820fe5d67ae39298aea44964ff48"} Dec 05 11:27:34 crc kubenswrapper[4728]: I1205 11:27:33.848302 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 11:27:34 crc kubenswrapper[4728]: I1205 11:27:33.854054 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 11:27:34 crc kubenswrapper[4728]: I1205 11:27:33.856556 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-qmwbv" Dec 05 11:27:34 crc kubenswrapper[4728]: I1205 11:27:33.872345 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 11:27:34 crc kubenswrapper[4728]: I1205 11:27:33.931551 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sx9dp\" (UniqueName: \"kubernetes.io/projected/d32b489f-b040-4f20-badc-ef587eeb0960-kube-api-access-sx9dp\") pod \"kube-state-metrics-0\" (UID: \"d32b489f-b040-4f20-badc-ef587eeb0960\") " pod="openstack/kube-state-metrics-0" Dec 05 11:27:34 crc kubenswrapper[4728]: I1205 11:27:34.033078 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sx9dp\" (UniqueName: \"kubernetes.io/projected/d32b489f-b040-4f20-badc-ef587eeb0960-kube-api-access-sx9dp\") pod \"kube-state-metrics-0\" (UID: \"d32b489f-b040-4f20-badc-ef587eeb0960\") " pod="openstack/kube-state-metrics-0" Dec 05 11:27:34 crc kubenswrapper[4728]: I1205 11:27:34.100474 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sx9dp\" (UniqueName: \"kubernetes.io/projected/d32b489f-b040-4f20-badc-ef587eeb0960-kube-api-access-sx9dp\") pod \"kube-state-metrics-0\" (UID: \"d32b489f-b040-4f20-badc-ef587eeb0960\") " pod="openstack/kube-state-metrics-0" Dec 05 11:27:34 crc kubenswrapper[4728]: I1205 11:27:34.189133 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 11:27:34 crc kubenswrapper[4728]: I1205 11:27:34.706855 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 11:27:37 crc kubenswrapper[4728]: I1205 11:27:37.932539 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 05 11:27:37 crc kubenswrapper[4728]: I1205 11:27:37.934187 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 05 11:27:37 crc kubenswrapper[4728]: I1205 11:27:37.936044 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Dec 05 11:27:37 crc kubenswrapper[4728]: I1205 11:27:37.936237 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Dec 05 11:27:37 crc kubenswrapper[4728]: I1205 11:27:37.936253 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Dec 05 11:27:37 crc kubenswrapper[4728]: I1205 11:27:37.937527 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Dec 05 11:27:37 crc kubenswrapper[4728]: I1205 11:27:37.944122 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-pxvt7" Dec 05 11:27:37 crc kubenswrapper[4728]: I1205 11:27:37.947301 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 05 11:27:38 crc kubenswrapper[4728]: I1205 11:27:38.005612 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/de9c5f8c-fb05-44a5-804d-1f8f2129da92-config\") pod \"ovsdbserver-nb-0\" (UID: \"de9c5f8c-fb05-44a5-804d-1f8f2129da92\") " pod="openstack/ovsdbserver-nb-0" Dec 05 11:27:38 crc kubenswrapper[4728]: I1205 11:27:38.005688 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: \"de9c5f8c-fb05-44a5-804d-1f8f2129da92\") " pod="openstack/ovsdbserver-nb-0" Dec 05 11:27:38 crc kubenswrapper[4728]: I1205 11:27:38.005716 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/de9c5f8c-fb05-44a5-804d-1f8f2129da92-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"de9c5f8c-fb05-44a5-804d-1f8f2129da92\") " pod="openstack/ovsdbserver-nb-0" Dec 05 11:27:38 crc kubenswrapper[4728]: I1205 11:27:38.005735 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/de9c5f8c-fb05-44a5-804d-1f8f2129da92-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"de9c5f8c-fb05-44a5-804d-1f8f2129da92\") " pod="openstack/ovsdbserver-nb-0" Dec 05 11:27:38 crc kubenswrapper[4728]: I1205 11:27:38.005755 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dvlp9\" (UniqueName: \"kubernetes.io/projected/de9c5f8c-fb05-44a5-804d-1f8f2129da92-kube-api-access-dvlp9\") pod \"ovsdbserver-nb-0\" (UID: \"de9c5f8c-fb05-44a5-804d-1f8f2129da92\") " pod="openstack/ovsdbserver-nb-0" Dec 05 11:27:38 crc kubenswrapper[4728]: I1205 11:27:38.005783 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/de9c5f8c-fb05-44a5-804d-1f8f2129da92-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"de9c5f8c-fb05-44a5-804d-1f8f2129da92\") " pod="openstack/ovsdbserver-nb-0" Dec 05 11:27:38 crc kubenswrapper[4728]: I1205 11:27:38.005854 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de9c5f8c-fb05-44a5-804d-1f8f2129da92-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"de9c5f8c-fb05-44a5-804d-1f8f2129da92\") " pod="openstack/ovsdbserver-nb-0" Dec 05 11:27:38 crc kubenswrapper[4728]: I1205 11:27:38.005896 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/de9c5f8c-fb05-44a5-804d-1f8f2129da92-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"de9c5f8c-fb05-44a5-804d-1f8f2129da92\") " pod="openstack/ovsdbserver-nb-0" Dec 05 11:27:38 crc kubenswrapper[4728]: I1205 11:27:38.106986 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de9c5f8c-fb05-44a5-804d-1f8f2129da92-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"de9c5f8c-fb05-44a5-804d-1f8f2129da92\") " pod="openstack/ovsdbserver-nb-0" Dec 05 11:27:38 crc kubenswrapper[4728]: I1205 11:27:38.108188 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/de9c5f8c-fb05-44a5-804d-1f8f2129da92-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"de9c5f8c-fb05-44a5-804d-1f8f2129da92\") " pod="openstack/ovsdbserver-nb-0" Dec 05 11:27:38 crc kubenswrapper[4728]: I1205 11:27:38.108262 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/de9c5f8c-fb05-44a5-804d-1f8f2129da92-config\") pod \"ovsdbserver-nb-0\" (UID: \"de9c5f8c-fb05-44a5-804d-1f8f2129da92\") " pod="openstack/ovsdbserver-nb-0" Dec 05 11:27:38 crc kubenswrapper[4728]: I1205 11:27:38.108295 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: \"de9c5f8c-fb05-44a5-804d-1f8f2129da92\") " pod="openstack/ovsdbserver-nb-0" Dec 05 11:27:38 crc kubenswrapper[4728]: I1205 11:27:38.108324 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/de9c5f8c-fb05-44a5-804d-1f8f2129da92-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"de9c5f8c-fb05-44a5-804d-1f8f2129da92\") " pod="openstack/ovsdbserver-nb-0" Dec 05 11:27:38 crc kubenswrapper[4728]: I1205 11:27:38.108349 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/de9c5f8c-fb05-44a5-804d-1f8f2129da92-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"de9c5f8c-fb05-44a5-804d-1f8f2129da92\") " pod="openstack/ovsdbserver-nb-0" Dec 05 11:27:38 crc kubenswrapper[4728]: I1205 11:27:38.108367 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dvlp9\" (UniqueName: \"kubernetes.io/projected/de9c5f8c-fb05-44a5-804d-1f8f2129da92-kube-api-access-dvlp9\") pod \"ovsdbserver-nb-0\" (UID: \"de9c5f8c-fb05-44a5-804d-1f8f2129da92\") " pod="openstack/ovsdbserver-nb-0" Dec 05 11:27:38 crc kubenswrapper[4728]: I1205 11:27:38.108409 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/de9c5f8c-fb05-44a5-804d-1f8f2129da92-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"de9c5f8c-fb05-44a5-804d-1f8f2129da92\") " pod="openstack/ovsdbserver-nb-0" Dec 05 11:27:38 crc kubenswrapper[4728]: 
I1205 11:27:38.108827 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/de9c5f8c-fb05-44a5-804d-1f8f2129da92-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"de9c5f8c-fb05-44a5-804d-1f8f2129da92\") " pod="openstack/ovsdbserver-nb-0" Dec 05 11:27:38 crc kubenswrapper[4728]: I1205 11:27:38.108991 4728 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: \"de9c5f8c-fb05-44a5-804d-1f8f2129da92\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/ovsdbserver-nb-0" Dec 05 11:27:38 crc kubenswrapper[4728]: I1205 11:27:38.109726 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/de9c5f8c-fb05-44a5-804d-1f8f2129da92-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"de9c5f8c-fb05-44a5-804d-1f8f2129da92\") " pod="openstack/ovsdbserver-nb-0" Dec 05 11:27:38 crc kubenswrapper[4728]: I1205 11:27:38.109787 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/de9c5f8c-fb05-44a5-804d-1f8f2129da92-config\") pod \"ovsdbserver-nb-0\" (UID: \"de9c5f8c-fb05-44a5-804d-1f8f2129da92\") " pod="openstack/ovsdbserver-nb-0" Dec 05 11:27:38 crc kubenswrapper[4728]: I1205 11:27:38.114849 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de9c5f8c-fb05-44a5-804d-1f8f2129da92-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"de9c5f8c-fb05-44a5-804d-1f8f2129da92\") " pod="openstack/ovsdbserver-nb-0" Dec 05 11:27:38 crc kubenswrapper[4728]: I1205 11:27:38.115581 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/de9c5f8c-fb05-44a5-804d-1f8f2129da92-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"de9c5f8c-fb05-44a5-804d-1f8f2129da92\") " pod="openstack/ovsdbserver-nb-0" Dec 05 11:27:38 crc kubenswrapper[4728]: I1205 11:27:38.128984 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/de9c5f8c-fb05-44a5-804d-1f8f2129da92-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"de9c5f8c-fb05-44a5-804d-1f8f2129da92\") " pod="openstack/ovsdbserver-nb-0" Dec 05 11:27:38 crc kubenswrapper[4728]: I1205 11:27:38.133392 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dvlp9\" (UniqueName: \"kubernetes.io/projected/de9c5f8c-fb05-44a5-804d-1f8f2129da92-kube-api-access-dvlp9\") pod \"ovsdbserver-nb-0\" (UID: \"de9c5f8c-fb05-44a5-804d-1f8f2129da92\") " pod="openstack/ovsdbserver-nb-0" Dec 05 11:27:38 crc kubenswrapper[4728]: I1205 11:27:38.141232 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: \"de9c5f8c-fb05-44a5-804d-1f8f2129da92\") " pod="openstack/ovsdbserver-nb-0" Dec 05 11:27:38 crc kubenswrapper[4728]: I1205 11:27:38.255588 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 05 11:27:38 crc kubenswrapper[4728]: W1205 11:27:38.431496 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd32b489f_b040_4f20_badc_ef587eeb0960.slice/crio-46c0e2b82741e525981d01205fe5f3ddde729eb1176514e2126cf9d20c72f4b2 WatchSource:0}: Error finding container 46c0e2b82741e525981d01205fe5f3ddde729eb1176514e2126cf9d20c72f4b2: Status 404 returned error can't find the container with id 46c0e2b82741e525981d01205fe5f3ddde729eb1176514e2126cf9d20c72f4b2 Dec 05 11:27:38 crc kubenswrapper[4728]: I1205 11:27:38.857527 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-74pwl"] Dec 05 11:27:38 crc kubenswrapper[4728]: I1205 11:27:38.858720 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-74pwl" Dec 05 11:27:38 crc kubenswrapper[4728]: I1205 11:27:38.861070 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Dec 05 11:27:38 crc kubenswrapper[4728]: I1205 11:27:38.861335 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Dec 05 11:27:38 crc kubenswrapper[4728]: I1205 11:27:38.862036 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-24q6h" Dec 05 11:27:38 crc kubenswrapper[4728]: I1205 11:27:38.868292 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-74pwl"] Dec 05 11:27:38 crc kubenswrapper[4728]: I1205 11:27:38.903166 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-csgtz"] Dec 05 11:27:38 crc kubenswrapper[4728]: I1205 11:27:38.906372 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-csgtz" Dec 05 11:27:38 crc kubenswrapper[4728]: I1205 11:27:38.914025 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-csgtz"] Dec 05 11:27:38 crc kubenswrapper[4728]: I1205 11:27:38.920771 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed80e7c1-b5a1-4606-b110-5d205dd122b4-combined-ca-bundle\") pod \"ovn-controller-74pwl\" (UID: \"ed80e7c1-b5a1-4606-b110-5d205dd122b4\") " pod="openstack/ovn-controller-74pwl" Dec 05 11:27:38 crc kubenswrapper[4728]: I1205 11:27:38.920855 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ed80e7c1-b5a1-4606-b110-5d205dd122b4-var-log-ovn\") pod \"ovn-controller-74pwl\" (UID: \"ed80e7c1-b5a1-4606-b110-5d205dd122b4\") " pod="openstack/ovn-controller-74pwl" Dec 05 11:27:38 crc kubenswrapper[4728]: I1205 11:27:38.920906 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ed80e7c1-b5a1-4606-b110-5d205dd122b4-scripts\") pod \"ovn-controller-74pwl\" (UID: \"ed80e7c1-b5a1-4606-b110-5d205dd122b4\") " pod="openstack/ovn-controller-74pwl" Dec 05 11:27:38 crc kubenswrapper[4728]: I1205 11:27:38.920945 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ed80e7c1-b5a1-4606-b110-5d205dd122b4-var-run\") pod \"ovn-controller-74pwl\" (UID: \"ed80e7c1-b5a1-4606-b110-5d205dd122b4\") " pod="openstack/ovn-controller-74pwl" Dec 05 11:27:38 crc kubenswrapper[4728]: I1205 11:27:38.920960 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ed80e7c1-b5a1-4606-b110-5d205dd122b4-var-run-ovn\") pod \"ovn-controller-74pwl\" (UID: \"ed80e7c1-b5a1-4606-b110-5d205dd122b4\") " pod="openstack/ovn-controller-74pwl" Dec 05 11:27:38 crc kubenswrapper[4728]: I1205 11:27:38.920997 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed80e7c1-b5a1-4606-b110-5d205dd122b4-ovn-controller-tls-certs\") pod \"ovn-controller-74pwl\" (UID: \"ed80e7c1-b5a1-4606-b110-5d205dd122b4\") " pod="openstack/ovn-controller-74pwl" Dec 05 11:27:38 crc kubenswrapper[4728]: I1205 11:27:38.921080 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dnz5r\" (UniqueName: \"kubernetes.io/projected/ed80e7c1-b5a1-4606-b110-5d205dd122b4-kube-api-access-dnz5r\") pod \"ovn-controller-74pwl\" (UID: \"ed80e7c1-b5a1-4606-b110-5d205dd122b4\") " pod="openstack/ovn-controller-74pwl" Dec 05 11:27:39 crc kubenswrapper[4728]: I1205 11:27:39.023068 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/a087e318-da4d-49e0-826e-198c5afc0a15-var-log\") pod \"ovn-controller-ovs-csgtz\" (UID: \"a087e318-da4d-49e0-826e-198c5afc0a15\") " pod="openstack/ovn-controller-ovs-csgtz" Dec 05 11:27:39 crc kubenswrapper[4728]: I1205 11:27:39.023131 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/a087e318-da4d-49e0-826e-198c5afc0a15-scripts\") pod \"ovn-controller-ovs-csgtz\" (UID: \"a087e318-da4d-49e0-826e-198c5afc0a15\") " pod="openstack/ovn-controller-ovs-csgtz" Dec 05 11:27:39 crc kubenswrapper[4728]: I1205 11:27:39.023178 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xvhbl\" (UniqueName: \"kubernetes.io/projected/a087e318-da4d-49e0-826e-198c5afc0a15-kube-api-access-xvhbl\") pod \"ovn-controller-ovs-csgtz\" (UID: \"a087e318-da4d-49e0-826e-198c5afc0a15\") " pod="openstack/ovn-controller-ovs-csgtz" Dec 05 11:27:39 crc kubenswrapper[4728]: I1205 11:27:39.023301 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dnz5r\" (UniqueName: \"kubernetes.io/projected/ed80e7c1-b5a1-4606-b110-5d205dd122b4-kube-api-access-dnz5r\") pod \"ovn-controller-74pwl\" (UID: \"ed80e7c1-b5a1-4606-b110-5d205dd122b4\") " pod="openstack/ovn-controller-74pwl" Dec 05 11:27:39 crc kubenswrapper[4728]: I1205 11:27:39.023355 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/a087e318-da4d-49e0-826e-198c5afc0a15-etc-ovs\") pod \"ovn-controller-ovs-csgtz\" (UID: \"a087e318-da4d-49e0-826e-198c5afc0a15\") " pod="openstack/ovn-controller-ovs-csgtz" Dec 05 11:27:39 crc kubenswrapper[4728]: I1205 11:27:39.023451 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed80e7c1-b5a1-4606-b110-5d205dd122b4-combined-ca-bundle\") pod \"ovn-controller-74pwl\" (UID: \"ed80e7c1-b5a1-4606-b110-5d205dd122b4\") " pod="openstack/ovn-controller-74pwl" Dec 05 11:27:39 crc kubenswrapper[4728]: I1205 11:27:39.023613 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ed80e7c1-b5a1-4606-b110-5d205dd122b4-var-log-ovn\") pod \"ovn-controller-74pwl\" (UID: \"ed80e7c1-b5a1-4606-b110-5d205dd122b4\") " pod="openstack/ovn-controller-74pwl" Dec 05 11:27:39 crc kubenswrapper[4728]: I1205 11:27:39.023699 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ed80e7c1-b5a1-4606-b110-5d205dd122b4-scripts\") pod \"ovn-controller-74pwl\" (UID: \"ed80e7c1-b5a1-4606-b110-5d205dd122b4\") " pod="openstack/ovn-controller-74pwl" Dec 05 11:27:39 crc kubenswrapper[4728]: I1205 11:27:39.023873 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a087e318-da4d-49e0-826e-198c5afc0a15-var-run\") pod \"ovn-controller-ovs-csgtz\" (UID: \"a087e318-da4d-49e0-826e-198c5afc0a15\") " pod="openstack/ovn-controller-ovs-csgtz" Dec 05 11:27:39 crc kubenswrapper[4728]: I1205 11:27:39.023902 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ed80e7c1-b5a1-4606-b110-5d205dd122b4-var-run\") pod \"ovn-controller-74pwl\" (UID: \"ed80e7c1-b5a1-4606-b110-5d205dd122b4\") " pod="openstack/ovn-controller-74pwl" Dec 05 11:27:39 crc kubenswrapper[4728]: I1205 11:27:39.023932 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ed80e7c1-b5a1-4606-b110-5d205dd122b4-var-run-ovn\") pod \"ovn-controller-74pwl\" (UID: 
\"ed80e7c1-b5a1-4606-b110-5d205dd122b4\") " pod="openstack/ovn-controller-74pwl" Dec 05 11:27:39 crc kubenswrapper[4728]: I1205 11:27:39.023999 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed80e7c1-b5a1-4606-b110-5d205dd122b4-ovn-controller-tls-certs\") pod \"ovn-controller-74pwl\" (UID: \"ed80e7c1-b5a1-4606-b110-5d205dd122b4\") " pod="openstack/ovn-controller-74pwl" Dec 05 11:27:39 crc kubenswrapper[4728]: I1205 11:27:39.024027 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/a087e318-da4d-49e0-826e-198c5afc0a15-var-lib\") pod \"ovn-controller-ovs-csgtz\" (UID: \"a087e318-da4d-49e0-826e-198c5afc0a15\") " pod="openstack/ovn-controller-ovs-csgtz" Dec 05 11:27:39 crc kubenswrapper[4728]: I1205 11:27:39.025073 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ed80e7c1-b5a1-4606-b110-5d205dd122b4-var-run-ovn\") pod \"ovn-controller-74pwl\" (UID: \"ed80e7c1-b5a1-4606-b110-5d205dd122b4\") " pod="openstack/ovn-controller-74pwl" Dec 05 11:27:39 crc kubenswrapper[4728]: I1205 11:27:39.025700 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ed80e7c1-b5a1-4606-b110-5d205dd122b4-var-log-ovn\") pod \"ovn-controller-74pwl\" (UID: \"ed80e7c1-b5a1-4606-b110-5d205dd122b4\") " pod="openstack/ovn-controller-74pwl" Dec 05 11:27:39 crc kubenswrapper[4728]: I1205 11:27:39.025731 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ed80e7c1-b5a1-4606-b110-5d205dd122b4-var-run\") pod \"ovn-controller-74pwl\" (UID: \"ed80e7c1-b5a1-4606-b110-5d205dd122b4\") " pod="openstack/ovn-controller-74pwl" Dec 05 11:27:39 crc kubenswrapper[4728]: I1205 11:27:39.027273 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed80e7c1-b5a1-4606-b110-5d205dd122b4-ovn-controller-tls-certs\") pod \"ovn-controller-74pwl\" (UID: \"ed80e7c1-b5a1-4606-b110-5d205dd122b4\") " pod="openstack/ovn-controller-74pwl" Dec 05 11:27:39 crc kubenswrapper[4728]: I1205 11:27:39.027894 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed80e7c1-b5a1-4606-b110-5d205dd122b4-combined-ca-bundle\") pod \"ovn-controller-74pwl\" (UID: \"ed80e7c1-b5a1-4606-b110-5d205dd122b4\") " pod="openstack/ovn-controller-74pwl" Dec 05 11:27:39 crc kubenswrapper[4728]: I1205 11:27:39.027890 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ed80e7c1-b5a1-4606-b110-5d205dd122b4-scripts\") pod \"ovn-controller-74pwl\" (UID: \"ed80e7c1-b5a1-4606-b110-5d205dd122b4\") " pod="openstack/ovn-controller-74pwl" Dec 05 11:27:39 crc kubenswrapper[4728]: I1205 11:27:39.041224 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dnz5r\" (UniqueName: \"kubernetes.io/projected/ed80e7c1-b5a1-4606-b110-5d205dd122b4-kube-api-access-dnz5r\") pod \"ovn-controller-74pwl\" (UID: \"ed80e7c1-b5a1-4606-b110-5d205dd122b4\") " pod="openstack/ovn-controller-74pwl" Dec 05 11:27:39 crc kubenswrapper[4728]: I1205 11:27:39.125670 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"var-run\" (UniqueName: \"kubernetes.io/host-path/a087e318-da4d-49e0-826e-198c5afc0a15-var-run\") pod \"ovn-controller-ovs-csgtz\" (UID: \"a087e318-da4d-49e0-826e-198c5afc0a15\") " pod="openstack/ovn-controller-ovs-csgtz" Dec 05 11:27:39 crc kubenswrapper[4728]: I1205 11:27:39.125771 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/a087e318-da4d-49e0-826e-198c5afc0a15-var-lib\") pod \"ovn-controller-ovs-csgtz\" (UID: \"a087e318-da4d-49e0-826e-198c5afc0a15\") " pod="openstack/ovn-controller-ovs-csgtz" Dec 05 11:27:39 crc kubenswrapper[4728]: I1205 11:27:39.125779 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a087e318-da4d-49e0-826e-198c5afc0a15-var-run\") pod \"ovn-controller-ovs-csgtz\" (UID: \"a087e318-da4d-49e0-826e-198c5afc0a15\") " pod="openstack/ovn-controller-ovs-csgtz" Dec 05 11:27:39 crc kubenswrapper[4728]: I1205 11:27:39.125930 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/a087e318-da4d-49e0-826e-198c5afc0a15-var-log\") pod \"ovn-controller-ovs-csgtz\" (UID: \"a087e318-da4d-49e0-826e-198c5afc0a15\") " pod="openstack/ovn-controller-ovs-csgtz" Dec 05 11:27:39 crc kubenswrapper[4728]: I1205 11:27:39.125982 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a087e318-da4d-49e0-826e-198c5afc0a15-scripts\") pod \"ovn-controller-ovs-csgtz\" (UID: \"a087e318-da4d-49e0-826e-198c5afc0a15\") " pod="openstack/ovn-controller-ovs-csgtz" Dec 05 11:27:39 crc kubenswrapper[4728]: I1205 11:27:39.126026 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xvhbl\" (UniqueName: \"kubernetes.io/projected/a087e318-da4d-49e0-826e-198c5afc0a15-kube-api-access-xvhbl\") pod \"ovn-controller-ovs-csgtz\" (UID: \"a087e318-da4d-49e0-826e-198c5afc0a15\") " pod="openstack/ovn-controller-ovs-csgtz" Dec 05 11:27:39 crc kubenswrapper[4728]: I1205 11:27:39.126050 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/a087e318-da4d-49e0-826e-198c5afc0a15-etc-ovs\") pod \"ovn-controller-ovs-csgtz\" (UID: \"a087e318-da4d-49e0-826e-198c5afc0a15\") " pod="openstack/ovn-controller-ovs-csgtz" Dec 05 11:27:39 crc kubenswrapper[4728]: I1205 11:27:39.126076 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/a087e318-da4d-49e0-826e-198c5afc0a15-var-lib\") pod \"ovn-controller-ovs-csgtz\" (UID: \"a087e318-da4d-49e0-826e-198c5afc0a15\") " pod="openstack/ovn-controller-ovs-csgtz" Dec 05 11:27:39 crc kubenswrapper[4728]: I1205 11:27:39.126305 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/a087e318-da4d-49e0-826e-198c5afc0a15-etc-ovs\") pod \"ovn-controller-ovs-csgtz\" (UID: \"a087e318-da4d-49e0-826e-198c5afc0a15\") " pod="openstack/ovn-controller-ovs-csgtz" Dec 05 11:27:39 crc kubenswrapper[4728]: I1205 11:27:39.128916 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/a087e318-da4d-49e0-826e-198c5afc0a15-var-log\") pod \"ovn-controller-ovs-csgtz\" (UID: \"a087e318-da4d-49e0-826e-198c5afc0a15\") " pod="openstack/ovn-controller-ovs-csgtz" Dec 05 11:27:39 crc 
kubenswrapper[4728]: I1205 11:27:39.129341 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a087e318-da4d-49e0-826e-198c5afc0a15-scripts\") pod \"ovn-controller-ovs-csgtz\" (UID: \"a087e318-da4d-49e0-826e-198c5afc0a15\") " pod="openstack/ovn-controller-ovs-csgtz" Dec 05 11:27:39 crc kubenswrapper[4728]: I1205 11:27:39.141123 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xvhbl\" (UniqueName: \"kubernetes.io/projected/a087e318-da4d-49e0-826e-198c5afc0a15-kube-api-access-xvhbl\") pod \"ovn-controller-ovs-csgtz\" (UID: \"a087e318-da4d-49e0-826e-198c5afc0a15\") " pod="openstack/ovn-controller-ovs-csgtz" Dec 05 11:27:39 crc kubenswrapper[4728]: I1205 11:27:39.184939 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-74pwl" Dec 05 11:27:39 crc kubenswrapper[4728]: I1205 11:27:39.229054 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-csgtz" Dec 05 11:27:39 crc kubenswrapper[4728]: I1205 11:27:39.444001 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"d32b489f-b040-4f20-badc-ef587eeb0960","Type":"ContainerStarted","Data":"46c0e2b82741e525981d01205fe5f3ddde729eb1176514e2126cf9d20c72f4b2"} Dec 05 11:27:40 crc kubenswrapper[4728]: I1205 11:27:40.807132 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 05 11:27:40 crc kubenswrapper[4728]: I1205 11:27:40.808448 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 05 11:27:40 crc kubenswrapper[4728]: I1205 11:27:40.811717 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Dec 05 11:27:40 crc kubenswrapper[4728]: I1205 11:27:40.812247 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Dec 05 11:27:40 crc kubenswrapper[4728]: I1205 11:27:40.812260 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-j54vs" Dec 05 11:27:40 crc kubenswrapper[4728]: I1205 11:27:40.812496 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Dec 05 11:27:40 crc kubenswrapper[4728]: I1205 11:27:40.820888 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 05 11:27:40 crc kubenswrapper[4728]: I1205 11:27:40.968055 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/269bbc54-5980-4de2-ac45-d1d7ff6335e9-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"269bbc54-5980-4de2-ac45-d1d7ff6335e9\") " pod="openstack/ovsdbserver-sb-0" Dec 05 11:27:40 crc kubenswrapper[4728]: I1205 11:27:40.968164 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/269bbc54-5980-4de2-ac45-d1d7ff6335e9-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"269bbc54-5980-4de2-ac45-d1d7ff6335e9\") " pod="openstack/ovsdbserver-sb-0" Dec 05 11:27:40 crc kubenswrapper[4728]: I1205 11:27:40.968196 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/269bbc54-5980-4de2-ac45-d1d7ff6335e9-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"269bbc54-5980-4de2-ac45-d1d7ff6335e9\") " pod="openstack/ovsdbserver-sb-0" Dec 05 11:27:40 crc kubenswrapper[4728]: I1205 11:27:40.968219 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/269bbc54-5980-4de2-ac45-d1d7ff6335e9-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"269bbc54-5980-4de2-ac45-d1d7ff6335e9\") " pod="openstack/ovsdbserver-sb-0" Dec 05 11:27:40 crc kubenswrapper[4728]: I1205 11:27:40.968235 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/269bbc54-5980-4de2-ac45-d1d7ff6335e9-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"269bbc54-5980-4de2-ac45-d1d7ff6335e9\") " pod="openstack/ovsdbserver-sb-0" Dec 05 11:27:40 crc kubenswrapper[4728]: I1205 11:27:40.968260 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8cxk6\" (UniqueName: \"kubernetes.io/projected/269bbc54-5980-4de2-ac45-d1d7ff6335e9-kube-api-access-8cxk6\") pod \"ovsdbserver-sb-0\" (UID: \"269bbc54-5980-4de2-ac45-d1d7ff6335e9\") " pod="openstack/ovsdbserver-sb-0" Dec 05 11:27:40 crc kubenswrapper[4728]: I1205 11:27:40.968527 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/269bbc54-5980-4de2-ac45-d1d7ff6335e9-config\") pod \"ovsdbserver-sb-0\" (UID: \"269bbc54-5980-4de2-ac45-d1d7ff6335e9\") " pod="openstack/ovsdbserver-sb-0" Dec 05 11:27:40 crc kubenswrapper[4728]: I1205 11:27:40.968626 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-sb-0\" (UID: \"269bbc54-5980-4de2-ac45-d1d7ff6335e9\") " pod="openstack/ovsdbserver-sb-0" Dec 05 11:27:41 crc kubenswrapper[4728]: I1205 11:27:41.069817 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/269bbc54-5980-4de2-ac45-d1d7ff6335e9-config\") pod \"ovsdbserver-sb-0\" (UID: \"269bbc54-5980-4de2-ac45-d1d7ff6335e9\") " pod="openstack/ovsdbserver-sb-0" Dec 05 11:27:41 crc kubenswrapper[4728]: I1205 11:27:41.069907 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-sb-0\" (UID: \"269bbc54-5980-4de2-ac45-d1d7ff6335e9\") " pod="openstack/ovsdbserver-sb-0" Dec 05 11:27:41 crc kubenswrapper[4728]: I1205 11:27:41.069973 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/269bbc54-5980-4de2-ac45-d1d7ff6335e9-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"269bbc54-5980-4de2-ac45-d1d7ff6335e9\") " pod="openstack/ovsdbserver-sb-0" Dec 05 11:27:41 crc kubenswrapper[4728]: I1205 11:27:41.070034 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/269bbc54-5980-4de2-ac45-d1d7ff6335e9-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"269bbc54-5980-4de2-ac45-d1d7ff6335e9\") " pod="openstack/ovsdbserver-sb-0" Dec 05 11:27:41 crc kubenswrapper[4728]: 
I1205 11:27:41.070067 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/269bbc54-5980-4de2-ac45-d1d7ff6335e9-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"269bbc54-5980-4de2-ac45-d1d7ff6335e9\") " pod="openstack/ovsdbserver-sb-0" Dec 05 11:27:41 crc kubenswrapper[4728]: I1205 11:27:41.070096 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/269bbc54-5980-4de2-ac45-d1d7ff6335e9-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"269bbc54-5980-4de2-ac45-d1d7ff6335e9\") " pod="openstack/ovsdbserver-sb-0" Dec 05 11:27:41 crc kubenswrapper[4728]: I1205 11:27:41.070118 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/269bbc54-5980-4de2-ac45-d1d7ff6335e9-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"269bbc54-5980-4de2-ac45-d1d7ff6335e9\") " pod="openstack/ovsdbserver-sb-0" Dec 05 11:27:41 crc kubenswrapper[4728]: I1205 11:27:41.070149 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8cxk6\" (UniqueName: \"kubernetes.io/projected/269bbc54-5980-4de2-ac45-d1d7ff6335e9-kube-api-access-8cxk6\") pod \"ovsdbserver-sb-0\" (UID: \"269bbc54-5980-4de2-ac45-d1d7ff6335e9\") " pod="openstack/ovsdbserver-sb-0" Dec 05 11:27:41 crc kubenswrapper[4728]: I1205 11:27:41.070442 4728 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-sb-0\" (UID: \"269bbc54-5980-4de2-ac45-d1d7ff6335e9\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/ovsdbserver-sb-0" Dec 05 11:27:41 crc kubenswrapper[4728]: I1205 11:27:41.070622 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/269bbc54-5980-4de2-ac45-d1d7ff6335e9-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"269bbc54-5980-4de2-ac45-d1d7ff6335e9\") " pod="openstack/ovsdbserver-sb-0" Dec 05 11:27:41 crc kubenswrapper[4728]: I1205 11:27:41.071010 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/269bbc54-5980-4de2-ac45-d1d7ff6335e9-config\") pod \"ovsdbserver-sb-0\" (UID: \"269bbc54-5980-4de2-ac45-d1d7ff6335e9\") " pod="openstack/ovsdbserver-sb-0" Dec 05 11:27:41 crc kubenswrapper[4728]: I1205 11:27:41.071885 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/269bbc54-5980-4de2-ac45-d1d7ff6335e9-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"269bbc54-5980-4de2-ac45-d1d7ff6335e9\") " pod="openstack/ovsdbserver-sb-0" Dec 05 11:27:41 crc kubenswrapper[4728]: I1205 11:27:41.077136 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/269bbc54-5980-4de2-ac45-d1d7ff6335e9-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"269bbc54-5980-4de2-ac45-d1d7ff6335e9\") " pod="openstack/ovsdbserver-sb-0" Dec 05 11:27:41 crc kubenswrapper[4728]: I1205 11:27:41.078201 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/269bbc54-5980-4de2-ac45-d1d7ff6335e9-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: 
\"269bbc54-5980-4de2-ac45-d1d7ff6335e9\") " pod="openstack/ovsdbserver-sb-0" Dec 05 11:27:41 crc kubenswrapper[4728]: I1205 11:27:41.087734 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8cxk6\" (UniqueName: \"kubernetes.io/projected/269bbc54-5980-4de2-ac45-d1d7ff6335e9-kube-api-access-8cxk6\") pod \"ovsdbserver-sb-0\" (UID: \"269bbc54-5980-4de2-ac45-d1d7ff6335e9\") " pod="openstack/ovsdbserver-sb-0" Dec 05 11:27:41 crc kubenswrapper[4728]: I1205 11:27:41.093866 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-sb-0\" (UID: \"269bbc54-5980-4de2-ac45-d1d7ff6335e9\") " pod="openstack/ovsdbserver-sb-0" Dec 05 11:27:41 crc kubenswrapper[4728]: I1205 11:27:41.098322 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/269bbc54-5980-4de2-ac45-d1d7ff6335e9-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"269bbc54-5980-4de2-ac45-d1d7ff6335e9\") " pod="openstack/ovsdbserver-sb-0" Dec 05 11:27:41 crc kubenswrapper[4728]: I1205 11:27:41.133959 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 05 11:27:55 crc kubenswrapper[4728]: I1205 11:27:55.702198 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 11:27:55 crc kubenswrapper[4728]: I1205 11:27:55.702834 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 11:27:57 crc kubenswrapper[4728]: E1205 11:27:57.481278 4728 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying layer: context canceled" image="registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0" Dec 05 11:27:57 crc kubenswrapper[4728]: E1205 11:27:57.481621 4728 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying system image from manifest list: copying layer: context canceled" image="registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0" Dec 05 11:27:57 crc kubenswrapper[4728]: E1205 11:27:57.481770 4728 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-state-metrics,Image:registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0,Command:[],Args:[--resources=pods 
--namespaces=openstack],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:http-metrics,HostPort:0,ContainerPort:8080,Protocol:TCP,HostIP:,},ContainerPort{Name:telemetry,HostPort:0,ContainerPort:8081,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-sx9dp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/livez,Port:{0 8080 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod kube-state-metrics-0_openstack(d32b489f-b040-4f20-badc-ef587eeb0960): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying layer: context canceled" logger="UnhandledError" Dec 05 11:27:57 crc kubenswrapper[4728]: E1205 11:27:57.483147 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-state-metrics\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying layer: context canceled\"" pod="openstack/kube-state-metrics-0" podUID="d32b489f-b040-4f20-badc-ef587eeb0960" Dec 05 11:27:57 crc kubenswrapper[4728]: E1205 11:27:57.622348 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-state-metrics\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0\\\"\"" pod="openstack/kube-state-metrics-0" podUID="d32b489f-b040-4f20-badc-ef587eeb0960" Dec 05 11:27:57 crc kubenswrapper[4728]: I1205 11:27:57.983769 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 05 11:27:58 crc kubenswrapper[4728]: E1205 11:27:58.233335 4728 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-memcached:current-podified" Dec 05 11:27:58 crc kubenswrapper[4728]: E1205 11:27:58.233780 4728 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:memcached,Image:quay.io/podified-antelope-centos9/openstack-memcached:current-podified,Command:[/usr/bin/dumb-init -- 
/usr/local/bin/kolla_start],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:memcached,HostPort:0,ContainerPort:11211,Protocol:TCP,HostIP:,},ContainerPort{Name:memcached-tls,HostPort:0,ContainerPort:11212,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:POD_IPS,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIPs,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:CONFIG_HASH,Value:n55h95h8bh556hf6h55dh678h5f7h666hd8h55fh5c5h665h546h5d9h77h6dh6h66bh645h5d6h646h6h68fh64h668h64fh547h5dch5b5h9bh6cq,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/src,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:memcached-tls-certs,ReadOnly:true,MountPath:/var/lib/config-data/tls/certs/memcached.crt,SubPath:tls.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:memcached-tls-certs,ReadOnly:true,MountPath:/var/lib/config-data/tls/private/memcached.key,SubPath:tls.key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-hmbkg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 11211 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 11211 },Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42457,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42457,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod memcached-0_openstack(efbf5688-8330-4166-a93b-03dcf8ed578d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 11:27:58 crc kubenswrapper[4728]: E1205 11:27:58.235055 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"memcached\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/memcached-0" podUID="efbf5688-8330-4166-a93b-03dcf8ed578d" Dec 05 11:27:58 crc kubenswrapper[4728]: E1205 11:27:58.625962 4728 pod_workers.go:1301] 
"Error syncing pod, skipping" err="failed to \"StartContainer\" for \"memcached\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-memcached:current-podified\\\"\"" pod="openstack/memcached-0" podUID="efbf5688-8330-4166-a93b-03dcf8ed578d" Dec 05 11:27:58 crc kubenswrapper[4728]: W1205 11:27:58.922547 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podde9c5f8c_fb05_44a5_804d_1f8f2129da92.slice/crio-d57e05ba5fd1d94903948c0a9852f612536c44b3827cca107fe371be82160ea4 WatchSource:0}: Error finding container d57e05ba5fd1d94903948c0a9852f612536c44b3827cca107fe371be82160ea4: Status 404 returned error can't find the container with id d57e05ba5fd1d94903948c0a9852f612536c44b3827cca107fe371be82160ea4 Dec 05 11:27:58 crc kubenswrapper[4728]: E1205 11:27:58.933712 4728 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 05 11:27:58 crc kubenswrapper[4728]: E1205 11:27:58.934006 4728 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-x8lq4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-g5hjp_openstack(2282724a-a519-4a9f-a95b-4f515866c305): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 11:27:58 crc 
kubenswrapper[4728]: E1205 11:27:58.935166 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-g5hjp" podUID="2282724a-a519-4a9f-a95b-4f515866c305" Dec 05 11:27:58 crc kubenswrapper[4728]: E1205 11:27:58.973066 4728 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 05 11:27:58 crc kubenswrapper[4728]: E1205 11:27:58.973205 4728 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-pptk8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-cn5ng_openstack(bbc53181-47df-4cf7-9a9d-61c1624f12b3): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 11:27:58 crc kubenswrapper[4728]: E1205 11:27:58.974495 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-cn5ng" podUID="bbc53181-47df-4cf7-9a9d-61c1624f12b3" Dec 05 11:27:58 crc kubenswrapper[4728]: E1205 11:27:58.975239 4728 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 05 11:27:58 crc kubenswrapper[4728]: E1205 11:27:58.975339 4728 
kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-84fxc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-57d769cc4f-mmkxw_openstack(ba33afa4-1aa3-408d-ba8c-c61f33aaa067): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 11:27:58 crc kubenswrapper[4728]: E1205 11:27:58.977052 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-57d769cc4f-mmkxw" podUID="ba33afa4-1aa3-408d-ba8c-c61f33aaa067" Dec 05 11:27:58 crc kubenswrapper[4728]: E1205 11:27:58.980619 4728 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 05 11:27:58 crc kubenswrapper[4728]: E1205 11:27:58.980855 4728 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68chd6h679hbfh55fhc6h5ffh5d8h94h56ch589hb4hc5h57bh677hcdh655h8dh667h675h654h66ch567h8fh659h5b4h675h566h55bh54h67dh6dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-kztmh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-666b6646f7-hsbkd_openstack(6adb9c76-1002-4ec2-a93e-dc614fd45766): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 11:27:58 crc kubenswrapper[4728]: E1205 11:27:58.982301 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-666b6646f7-hsbkd" podUID="6adb9c76-1002-4ec2-a93e-dc614fd45766" Dec 05 11:27:59 crc kubenswrapper[4728]: I1205 11:27:59.502922 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 05 11:27:59 crc kubenswrapper[4728]: I1205 11:27:59.530451 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-74pwl"] Dec 05 11:27:59 crc kubenswrapper[4728]: W1205 11:27:59.531104 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poded80e7c1_b5a1_4606_b110_5d205dd122b4.slice/crio-e8aaf2c40d671e685b5d643c85fa8d879b614ba319b517c8453ff9981b698783 WatchSource:0}: Error finding container e8aaf2c40d671e685b5d643c85fa8d879b614ba319b517c8453ff9981b698783: Status 404 returned error can't find the container with id e8aaf2c40d671e685b5d643c85fa8d879b614ba319b517c8453ff9981b698783 Dec 05 11:27:59 crc kubenswrapper[4728]: I1205 11:27:59.631434 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"08dfc6f9-aba5-4869-bdd3-7e3e33754318","Type":"ContainerStarted","Data":"657b0aea4e5d6f756da0fc1539007ee88c144539a46869252361ca6e1375ca25"} Dec 05 11:27:59 crc kubenswrapper[4728]: I1205 11:27:59.633948 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" 
event={"ID":"de9c5f8c-fb05-44a5-804d-1f8f2129da92","Type":"ContainerStarted","Data":"d57e05ba5fd1d94903948c0a9852f612536c44b3827cca107fe371be82160ea4"} Dec 05 11:27:59 crc kubenswrapper[4728]: I1205 11:27:59.635758 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"d9cada47-64db-4c9c-8598-917e4099a8a6","Type":"ContainerStarted","Data":"1b0a2ed8fce2a587553a927c77053b0631efcf26f0b9efe9fbef338df713fcb5"} Dec 05 11:27:59 crc kubenswrapper[4728]: I1205 11:27:59.637345 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"269bbc54-5980-4de2-ac45-d1d7ff6335e9","Type":"ContainerStarted","Data":"a62a1c3dbf924c30b38be50e3ba4392a0af2b0459d94dfd947b4a6760a9cd3c1"} Dec 05 11:27:59 crc kubenswrapper[4728]: I1205 11:27:59.639072 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-74pwl" event={"ID":"ed80e7c1-b5a1-4606-b110-5d205dd122b4","Type":"ContainerStarted","Data":"e8aaf2c40d671e685b5d643c85fa8d879b614ba319b517c8453ff9981b698783"} Dec 05 11:27:59 crc kubenswrapper[4728]: E1205 11:27:59.640337 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-666b6646f7-hsbkd" podUID="6adb9c76-1002-4ec2-a93e-dc614fd45766" Dec 05 11:27:59 crc kubenswrapper[4728]: E1205 11:27:59.640578 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-57d769cc4f-mmkxw" podUID="ba33afa4-1aa3-408d-ba8c-c61f33aaa067" Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.073350 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-csgtz"] Dec 05 11:28:00 crc kubenswrapper[4728]: W1205 11:28:00.081018 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda087e318_da4d_49e0_826e_198c5afc0a15.slice/crio-d3e48e2f5fc9c0421e749541ceb0ce25ab9fd287d88e59ef3910a2ca8cf7636d WatchSource:0}: Error finding container d3e48e2f5fc9c0421e749541ceb0ce25ab9fd287d88e59ef3910a2ca8cf7636d: Status 404 returned error can't find the container with id d3e48e2f5fc9c0421e749541ceb0ce25ab9fd287d88e59ef3910a2ca8cf7636d Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.162332 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-g5hjp" Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.174296 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-cn5ng" Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.315482 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x8lq4\" (UniqueName: \"kubernetes.io/projected/2282724a-a519-4a9f-a95b-4f515866c305-kube-api-access-x8lq4\") pod \"2282724a-a519-4a9f-a95b-4f515866c305\" (UID: \"2282724a-a519-4a9f-a95b-4f515866c305\") " Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.315610 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bbc53181-47df-4cf7-9a9d-61c1624f12b3-config\") pod \"bbc53181-47df-4cf7-9a9d-61c1624f12b3\" (UID: \"bbc53181-47df-4cf7-9a9d-61c1624f12b3\") " Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.315682 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2282724a-a519-4a9f-a95b-4f515866c305-config\") pod \"2282724a-a519-4a9f-a95b-4f515866c305\" (UID: \"2282724a-a519-4a9f-a95b-4f515866c305\") " Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.315709 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2282724a-a519-4a9f-a95b-4f515866c305-dns-svc\") pod \"2282724a-a519-4a9f-a95b-4f515866c305\" (UID: \"2282724a-a519-4a9f-a95b-4f515866c305\") " Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.316246 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2282724a-a519-4a9f-a95b-4f515866c305-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "2282724a-a519-4a9f-a95b-4f515866c305" (UID: "2282724a-a519-4a9f-a95b-4f515866c305"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.317005 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pptk8\" (UniqueName: \"kubernetes.io/projected/bbc53181-47df-4cf7-9a9d-61c1624f12b3-kube-api-access-pptk8\") pod \"bbc53181-47df-4cf7-9a9d-61c1624f12b3\" (UID: \"bbc53181-47df-4cf7-9a9d-61c1624f12b3\") " Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.317380 4728 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2282724a-a519-4a9f-a95b-4f515866c305-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.317415 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bbc53181-47df-4cf7-9a9d-61c1624f12b3-config" (OuterVolumeSpecName: "config") pod "bbc53181-47df-4cf7-9a9d-61c1624f12b3" (UID: "bbc53181-47df-4cf7-9a9d-61c1624f12b3"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.317547 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2282724a-a519-4a9f-a95b-4f515866c305-config" (OuterVolumeSpecName: "config") pod "2282724a-a519-4a9f-a95b-4f515866c305" (UID: "2282724a-a519-4a9f-a95b-4f515866c305"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.390484 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bbc53181-47df-4cf7-9a9d-61c1624f12b3-kube-api-access-pptk8" (OuterVolumeSpecName: "kube-api-access-pptk8") pod "bbc53181-47df-4cf7-9a9d-61c1624f12b3" (UID: "bbc53181-47df-4cf7-9a9d-61c1624f12b3"). InnerVolumeSpecName "kube-api-access-pptk8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.390613 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2282724a-a519-4a9f-a95b-4f515866c305-kube-api-access-x8lq4" (OuterVolumeSpecName: "kube-api-access-x8lq4") pod "2282724a-a519-4a9f-a95b-4f515866c305" (UID: "2282724a-a519-4a9f-a95b-4f515866c305"). InnerVolumeSpecName "kube-api-access-x8lq4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.427928 4728 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bbc53181-47df-4cf7-9a9d-61c1624f12b3-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.427976 4728 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2282724a-a519-4a9f-a95b-4f515866c305-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.427993 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pptk8\" (UniqueName: \"kubernetes.io/projected/bbc53181-47df-4cf7-9a9d-61c1624f12b3-kube-api-access-pptk8\") on node \"crc\" DevicePath \"\"" Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.428006 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x8lq4\" (UniqueName: \"kubernetes.io/projected/2282724a-a519-4a9f-a95b-4f515866c305-kube-api-access-x8lq4\") on node \"crc\" DevicePath \"\"" Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.673952 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-5shht"] Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.675314 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-5shht" Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.679934 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.684892 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-5shht"] Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.690003 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-g5hjp" event={"ID":"2282724a-a519-4a9f-a95b-4f515866c305","Type":"ContainerDied","Data":"1ae34ba56694740ab01d48545c3ae16b5a3df2428de66bc40e94abaa188599a0"} Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.690086 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-g5hjp" Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.692472 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"4b375e4c-1072-4e6b-be7b-4eea43abf413","Type":"ContainerStarted","Data":"8adfc871f6cc2dd8a3d042f60b6adbb6306b6762f75c01fed28e659895a3180e"} Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.729344 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"1ffe010f-366a-454a-a8a3-639ed1cf0fdc","Type":"ContainerStarted","Data":"d1fe7cce520a9bd7d7211a892a3d6e4444e5b7752c415881b7288fb3265ef5cf"} Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.733170 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-cn5ng" event={"ID":"bbc53181-47df-4cf7-9a9d-61c1624f12b3","Type":"ContainerDied","Data":"2905f801559ae45130f9dc2dc2f4d03e7f67ceeb833c0d1ba1c4da9bb4a534c1"} Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.733286 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-cn5ng" Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.754276 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-csgtz" event={"ID":"a087e318-da4d-49e0-826e-198c5afc0a15","Type":"ContainerStarted","Data":"d3e48e2f5fc9c0421e749541ceb0ce25ab9fd287d88e59ef3910a2ca8cf7636d"} Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.834193 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/7e189ef3-6dab-4ce4-9cc3-b9bd409342ba-ovn-rundir\") pod \"ovn-controller-metrics-5shht\" (UID: \"7e189ef3-6dab-4ce4-9cc3-b9bd409342ba\") " pod="openstack/ovn-controller-metrics-5shht" Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.834239 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e189ef3-6dab-4ce4-9cc3-b9bd409342ba-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-5shht\" (UID: \"7e189ef3-6dab-4ce4-9cc3-b9bd409342ba\") " pod="openstack/ovn-controller-metrics-5shht" Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.834266 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j2h5l\" (UniqueName: \"kubernetes.io/projected/7e189ef3-6dab-4ce4-9cc3-b9bd409342ba-kube-api-access-j2h5l\") pod \"ovn-controller-metrics-5shht\" (UID: \"7e189ef3-6dab-4ce4-9cc3-b9bd409342ba\") " pod="openstack/ovn-controller-metrics-5shht" Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.834438 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/7e189ef3-6dab-4ce4-9cc3-b9bd409342ba-ovs-rundir\") pod \"ovn-controller-metrics-5shht\" (UID: \"7e189ef3-6dab-4ce4-9cc3-b9bd409342ba\") " pod="openstack/ovn-controller-metrics-5shht" Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.834464 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e189ef3-6dab-4ce4-9cc3-b9bd409342ba-config\") pod \"ovn-controller-metrics-5shht\" (UID: \"7e189ef3-6dab-4ce4-9cc3-b9bd409342ba\") " pod="openstack/ovn-controller-metrics-5shht" Dec 05 11:28:00 crc 
kubenswrapper[4728]: I1205 11:28:00.834496 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e189ef3-6dab-4ce4-9cc3-b9bd409342ba-combined-ca-bundle\") pod \"ovn-controller-metrics-5shht\" (UID: \"7e189ef3-6dab-4ce4-9cc3-b9bd409342ba\") " pod="openstack/ovn-controller-metrics-5shht" Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.841056 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-mmkxw"] Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.858734 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-g5hjp"] Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.859393 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-g5hjp"] Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.873339 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-fqksc"] Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.875219 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-fqksc" Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.880750 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.899521 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-fqksc"] Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.919454 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-cn5ng"] Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.934084 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-cn5ng"] Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.936789 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/7e189ef3-6dab-4ce4-9cc3-b9bd409342ba-ovs-rundir\") pod \"ovn-controller-metrics-5shht\" (UID: \"7e189ef3-6dab-4ce4-9cc3-b9bd409342ba\") " pod="openstack/ovn-controller-metrics-5shht" Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.936859 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qvshv\" (UniqueName: \"kubernetes.io/projected/c3204df5-4f1c-4761-a5d6-38b09d27821b-kube-api-access-qvshv\") pod \"dnsmasq-dns-7fd796d7df-fqksc\" (UID: \"c3204df5-4f1c-4761-a5d6-38b09d27821b\") " pod="openstack/dnsmasq-dns-7fd796d7df-fqksc" Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.936918 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e189ef3-6dab-4ce4-9cc3-b9bd409342ba-config\") pod \"ovn-controller-metrics-5shht\" (UID: \"7e189ef3-6dab-4ce4-9cc3-b9bd409342ba\") " pod="openstack/ovn-controller-metrics-5shht" Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.936941 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e189ef3-6dab-4ce4-9cc3-b9bd409342ba-combined-ca-bundle\") pod \"ovn-controller-metrics-5shht\" (UID: \"7e189ef3-6dab-4ce4-9cc3-b9bd409342ba\") " pod="openstack/ovn-controller-metrics-5shht" Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.936962 4728 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c3204df5-4f1c-4761-a5d6-38b09d27821b-ovsdbserver-nb\") pod \"dnsmasq-dns-7fd796d7df-fqksc\" (UID: \"c3204df5-4f1c-4761-a5d6-38b09d27821b\") " pod="openstack/dnsmasq-dns-7fd796d7df-fqksc" Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.937014 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c3204df5-4f1c-4761-a5d6-38b09d27821b-dns-svc\") pod \"dnsmasq-dns-7fd796d7df-fqksc\" (UID: \"c3204df5-4f1c-4761-a5d6-38b09d27821b\") " pod="openstack/dnsmasq-dns-7fd796d7df-fqksc" Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.937202 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/7e189ef3-6dab-4ce4-9cc3-b9bd409342ba-ovn-rundir\") pod \"ovn-controller-metrics-5shht\" (UID: \"7e189ef3-6dab-4ce4-9cc3-b9bd409342ba\") " pod="openstack/ovn-controller-metrics-5shht" Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.937223 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e189ef3-6dab-4ce4-9cc3-b9bd409342ba-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-5shht\" (UID: \"7e189ef3-6dab-4ce4-9cc3-b9bd409342ba\") " pod="openstack/ovn-controller-metrics-5shht" Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.937259 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j2h5l\" (UniqueName: \"kubernetes.io/projected/7e189ef3-6dab-4ce4-9cc3-b9bd409342ba-kube-api-access-j2h5l\") pod \"ovn-controller-metrics-5shht\" (UID: \"7e189ef3-6dab-4ce4-9cc3-b9bd409342ba\") " pod="openstack/ovn-controller-metrics-5shht" Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.937295 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c3204df5-4f1c-4761-a5d6-38b09d27821b-config\") pod \"dnsmasq-dns-7fd796d7df-fqksc\" (UID: \"c3204df5-4f1c-4761-a5d6-38b09d27821b\") " pod="openstack/dnsmasq-dns-7fd796d7df-fqksc" Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.937821 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/7e189ef3-6dab-4ce4-9cc3-b9bd409342ba-ovn-rundir\") pod \"ovn-controller-metrics-5shht\" (UID: \"7e189ef3-6dab-4ce4-9cc3-b9bd409342ba\") " pod="openstack/ovn-controller-metrics-5shht" Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.945775 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/7e189ef3-6dab-4ce4-9cc3-b9bd409342ba-ovs-rundir\") pod \"ovn-controller-metrics-5shht\" (UID: \"7e189ef3-6dab-4ce4-9cc3-b9bd409342ba\") " pod="openstack/ovn-controller-metrics-5shht" Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.946784 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e189ef3-6dab-4ce4-9cc3-b9bd409342ba-config\") pod \"ovn-controller-metrics-5shht\" (UID: \"7e189ef3-6dab-4ce4-9cc3-b9bd409342ba\") " pod="openstack/ovn-controller-metrics-5shht" Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.948374 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e189ef3-6dab-4ce4-9cc3-b9bd409342ba-combined-ca-bundle\") pod \"ovn-controller-metrics-5shht\" (UID: \"7e189ef3-6dab-4ce4-9cc3-b9bd409342ba\") " pod="openstack/ovn-controller-metrics-5shht" Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.951387 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e189ef3-6dab-4ce4-9cc3-b9bd409342ba-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-5shht\" (UID: \"7e189ef3-6dab-4ce4-9cc3-b9bd409342ba\") " pod="openstack/ovn-controller-metrics-5shht" Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.962378 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j2h5l\" (UniqueName: \"kubernetes.io/projected/7e189ef3-6dab-4ce4-9cc3-b9bd409342ba-kube-api-access-j2h5l\") pod \"ovn-controller-metrics-5shht\" (UID: \"7e189ef3-6dab-4ce4-9cc3-b9bd409342ba\") " pod="openstack/ovn-controller-metrics-5shht" Dec 05 11:28:00 crc kubenswrapper[4728]: I1205 11:28:00.986691 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-hsbkd"] Dec 05 11:28:01 crc kubenswrapper[4728]: I1205 11:28:01.041373 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-mc7sj"] Dec 05 11:28:01 crc kubenswrapper[4728]: I1205 11:28:01.042764 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-mc7sj" Dec 05 11:28:01 crc kubenswrapper[4728]: I1205 11:28:01.045750 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c3204df5-4f1c-4761-a5d6-38b09d27821b-ovsdbserver-nb\") pod \"dnsmasq-dns-7fd796d7df-fqksc\" (UID: \"c3204df5-4f1c-4761-a5d6-38b09d27821b\") " pod="openstack/dnsmasq-dns-7fd796d7df-fqksc" Dec 05 11:28:01 crc kubenswrapper[4728]: I1205 11:28:01.045832 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c3204df5-4f1c-4761-a5d6-38b09d27821b-dns-svc\") pod \"dnsmasq-dns-7fd796d7df-fqksc\" (UID: \"c3204df5-4f1c-4761-a5d6-38b09d27821b\") " pod="openstack/dnsmasq-dns-7fd796d7df-fqksc" Dec 05 11:28:01 crc kubenswrapper[4728]: I1205 11:28:01.045925 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c3204df5-4f1c-4761-a5d6-38b09d27821b-config\") pod \"dnsmasq-dns-7fd796d7df-fqksc\" (UID: \"c3204df5-4f1c-4761-a5d6-38b09d27821b\") " pod="openstack/dnsmasq-dns-7fd796d7df-fqksc" Dec 05 11:28:01 crc kubenswrapper[4728]: I1205 11:28:01.045972 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qvshv\" (UniqueName: \"kubernetes.io/projected/c3204df5-4f1c-4761-a5d6-38b09d27821b-kube-api-access-qvshv\") pod \"dnsmasq-dns-7fd796d7df-fqksc\" (UID: \"c3204df5-4f1c-4761-a5d6-38b09d27821b\") " pod="openstack/dnsmasq-dns-7fd796d7df-fqksc" Dec 05 11:28:01 crc kubenswrapper[4728]: I1205 11:28:01.047091 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c3204df5-4f1c-4761-a5d6-38b09d27821b-ovsdbserver-nb\") pod \"dnsmasq-dns-7fd796d7df-fqksc\" (UID: \"c3204df5-4f1c-4761-a5d6-38b09d27821b\") " pod="openstack/dnsmasq-dns-7fd796d7df-fqksc" Dec 05 11:28:01 crc kubenswrapper[4728]: I1205 11:28:01.047645 4728 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c3204df5-4f1c-4761-a5d6-38b09d27821b-dns-svc\") pod \"dnsmasq-dns-7fd796d7df-fqksc\" (UID: \"c3204df5-4f1c-4761-a5d6-38b09d27821b\") " pod="openstack/dnsmasq-dns-7fd796d7df-fqksc" Dec 05 11:28:01 crc kubenswrapper[4728]: I1205 11:28:01.049392 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Dec 05 11:28:01 crc kubenswrapper[4728]: I1205 11:28:01.049923 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-5shht" Dec 05 11:28:01 crc kubenswrapper[4728]: I1205 11:28:01.050732 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c3204df5-4f1c-4761-a5d6-38b09d27821b-config\") pod \"dnsmasq-dns-7fd796d7df-fqksc\" (UID: \"c3204df5-4f1c-4761-a5d6-38b09d27821b\") " pod="openstack/dnsmasq-dns-7fd796d7df-fqksc" Dec 05 11:28:01 crc kubenswrapper[4728]: I1205 11:28:01.069912 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qvshv\" (UniqueName: \"kubernetes.io/projected/c3204df5-4f1c-4761-a5d6-38b09d27821b-kube-api-access-qvshv\") pod \"dnsmasq-dns-7fd796d7df-fqksc\" (UID: \"c3204df5-4f1c-4761-a5d6-38b09d27821b\") " pod="openstack/dnsmasq-dns-7fd796d7df-fqksc" Dec 05 11:28:01 crc kubenswrapper[4728]: I1205 11:28:01.076404 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-mc7sj"] Dec 05 11:28:01 crc kubenswrapper[4728]: I1205 11:28:01.147353 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/afc4bd20-529c-442f-a1a5-ed1b6fa92717-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-mc7sj\" (UID: \"afc4bd20-529c-442f-a1a5-ed1b6fa92717\") " pod="openstack/dnsmasq-dns-86db49b7ff-mc7sj" Dec 05 11:28:01 crc kubenswrapper[4728]: I1205 11:28:01.147399 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/afc4bd20-529c-442f-a1a5-ed1b6fa92717-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-mc7sj\" (UID: \"afc4bd20-529c-442f-a1a5-ed1b6fa92717\") " pod="openstack/dnsmasq-dns-86db49b7ff-mc7sj" Dec 05 11:28:01 crc kubenswrapper[4728]: I1205 11:28:01.147460 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/afc4bd20-529c-442f-a1a5-ed1b6fa92717-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-mc7sj\" (UID: \"afc4bd20-529c-442f-a1a5-ed1b6fa92717\") " pod="openstack/dnsmasq-dns-86db49b7ff-mc7sj" Dec 05 11:28:01 crc kubenswrapper[4728]: I1205 11:28:01.147524 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/afc4bd20-529c-442f-a1a5-ed1b6fa92717-config\") pod \"dnsmasq-dns-86db49b7ff-mc7sj\" (UID: \"afc4bd20-529c-442f-a1a5-ed1b6fa92717\") " pod="openstack/dnsmasq-dns-86db49b7ff-mc7sj" Dec 05 11:28:01 crc kubenswrapper[4728]: I1205 11:28:01.147546 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jkwd2\" (UniqueName: \"kubernetes.io/projected/afc4bd20-529c-442f-a1a5-ed1b6fa92717-kube-api-access-jkwd2\") pod \"dnsmasq-dns-86db49b7ff-mc7sj\" (UID: \"afc4bd20-529c-442f-a1a5-ed1b6fa92717\") " 
pod="openstack/dnsmasq-dns-86db49b7ff-mc7sj" Dec 05 11:28:01 crc kubenswrapper[4728]: I1205 11:28:01.209759 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-fqksc" Dec 05 11:28:01 crc kubenswrapper[4728]: I1205 11:28:01.251606 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/afc4bd20-529c-442f-a1a5-ed1b6fa92717-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-mc7sj\" (UID: \"afc4bd20-529c-442f-a1a5-ed1b6fa92717\") " pod="openstack/dnsmasq-dns-86db49b7ff-mc7sj" Dec 05 11:28:01 crc kubenswrapper[4728]: I1205 11:28:01.251651 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/afc4bd20-529c-442f-a1a5-ed1b6fa92717-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-mc7sj\" (UID: \"afc4bd20-529c-442f-a1a5-ed1b6fa92717\") " pod="openstack/dnsmasq-dns-86db49b7ff-mc7sj" Dec 05 11:28:01 crc kubenswrapper[4728]: I1205 11:28:01.251719 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/afc4bd20-529c-442f-a1a5-ed1b6fa92717-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-mc7sj\" (UID: \"afc4bd20-529c-442f-a1a5-ed1b6fa92717\") " pod="openstack/dnsmasq-dns-86db49b7ff-mc7sj" Dec 05 11:28:01 crc kubenswrapper[4728]: I1205 11:28:01.251770 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/afc4bd20-529c-442f-a1a5-ed1b6fa92717-config\") pod \"dnsmasq-dns-86db49b7ff-mc7sj\" (UID: \"afc4bd20-529c-442f-a1a5-ed1b6fa92717\") " pod="openstack/dnsmasq-dns-86db49b7ff-mc7sj" Dec 05 11:28:01 crc kubenswrapper[4728]: I1205 11:28:01.251807 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jkwd2\" (UniqueName: \"kubernetes.io/projected/afc4bd20-529c-442f-a1a5-ed1b6fa92717-kube-api-access-jkwd2\") pod \"dnsmasq-dns-86db49b7ff-mc7sj\" (UID: \"afc4bd20-529c-442f-a1a5-ed1b6fa92717\") " pod="openstack/dnsmasq-dns-86db49b7ff-mc7sj" Dec 05 11:28:01 crc kubenswrapper[4728]: I1205 11:28:01.253316 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/afc4bd20-529c-442f-a1a5-ed1b6fa92717-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-mc7sj\" (UID: \"afc4bd20-529c-442f-a1a5-ed1b6fa92717\") " pod="openstack/dnsmasq-dns-86db49b7ff-mc7sj" Dec 05 11:28:01 crc kubenswrapper[4728]: I1205 11:28:01.253352 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/afc4bd20-529c-442f-a1a5-ed1b6fa92717-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-mc7sj\" (UID: \"afc4bd20-529c-442f-a1a5-ed1b6fa92717\") " pod="openstack/dnsmasq-dns-86db49b7ff-mc7sj" Dec 05 11:28:01 crc kubenswrapper[4728]: I1205 11:28:01.254381 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/afc4bd20-529c-442f-a1a5-ed1b6fa92717-config\") pod \"dnsmasq-dns-86db49b7ff-mc7sj\" (UID: \"afc4bd20-529c-442f-a1a5-ed1b6fa92717\") " pod="openstack/dnsmasq-dns-86db49b7ff-mc7sj" Dec 05 11:28:01 crc kubenswrapper[4728]: I1205 11:28:01.259021 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/afc4bd20-529c-442f-a1a5-ed1b6fa92717-dns-svc\") pod 
\"dnsmasq-dns-86db49b7ff-mc7sj\" (UID: \"afc4bd20-529c-442f-a1a5-ed1b6fa92717\") " pod="openstack/dnsmasq-dns-86db49b7ff-mc7sj" Dec 05 11:28:01 crc kubenswrapper[4728]: I1205 11:28:01.274053 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jkwd2\" (UniqueName: \"kubernetes.io/projected/afc4bd20-529c-442f-a1a5-ed1b6fa92717-kube-api-access-jkwd2\") pod \"dnsmasq-dns-86db49b7ff-mc7sj\" (UID: \"afc4bd20-529c-442f-a1a5-ed1b6fa92717\") " pod="openstack/dnsmasq-dns-86db49b7ff-mc7sj" Dec 05 11:28:01 crc kubenswrapper[4728]: I1205 11:28:01.400416 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-mc7sj" Dec 05 11:28:02 crc kubenswrapper[4728]: I1205 11:28:02.363762 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2282724a-a519-4a9f-a95b-4f515866c305" path="/var/lib/kubelet/pods/2282724a-a519-4a9f-a95b-4f515866c305/volumes" Dec 05 11:28:02 crc kubenswrapper[4728]: I1205 11:28:02.364371 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bbc53181-47df-4cf7-9a9d-61c1624f12b3" path="/var/lib/kubelet/pods/bbc53181-47df-4cf7-9a9d-61c1624f12b3/volumes" Dec 05 11:28:02 crc kubenswrapper[4728]: I1205 11:28:02.711064 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-mmkxw" Dec 05 11:28:02 crc kubenswrapper[4728]: I1205 11:28:02.724514 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-hsbkd" Dec 05 11:28:02 crc kubenswrapper[4728]: I1205 11:28:02.770268 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-hsbkd" event={"ID":"6adb9c76-1002-4ec2-a93e-dc614fd45766","Type":"ContainerDied","Data":"9dbbb16040e6161dcc6c9b1ed6850069da7c58d65db4d472571501f22fe3de24"} Dec 05 11:28:02 crc kubenswrapper[4728]: I1205 11:28:02.770329 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-hsbkd" Dec 05 11:28:02 crc kubenswrapper[4728]: I1205 11:28:02.771380 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-mmkxw" event={"ID":"ba33afa4-1aa3-408d-ba8c-c61f33aaa067","Type":"ContainerDied","Data":"37bebf2fcde2c6ecaa3ad2fb3eed89e996f8479e2f3318bd0d944fe8886da1b7"} Dec 05 11:28:02 crc kubenswrapper[4728]: I1205 11:28:02.771402 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-mmkxw" Dec 05 11:28:02 crc kubenswrapper[4728]: I1205 11:28:02.780929 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6adb9c76-1002-4ec2-a93e-dc614fd45766-dns-svc\") pod \"6adb9c76-1002-4ec2-a93e-dc614fd45766\" (UID: \"6adb9c76-1002-4ec2-a93e-dc614fd45766\") " Dec 05 11:28:02 crc kubenswrapper[4728]: I1205 11:28:02.781032 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ba33afa4-1aa3-408d-ba8c-c61f33aaa067-dns-svc\") pod \"ba33afa4-1aa3-408d-ba8c-c61f33aaa067\" (UID: \"ba33afa4-1aa3-408d-ba8c-c61f33aaa067\") " Dec 05 11:28:02 crc kubenswrapper[4728]: I1205 11:28:02.781325 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6adb9c76-1002-4ec2-a93e-dc614fd45766-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "6adb9c76-1002-4ec2-a93e-dc614fd45766" (UID: "6adb9c76-1002-4ec2-a93e-dc614fd45766"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:28:02 crc kubenswrapper[4728]: I1205 11:28:02.781333 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ba33afa4-1aa3-408d-ba8c-c61f33aaa067-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ba33afa4-1aa3-408d-ba8c-c61f33aaa067" (UID: "ba33afa4-1aa3-408d-ba8c-c61f33aaa067"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:28:02 crc kubenswrapper[4728]: I1205 11:28:02.781429 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6adb9c76-1002-4ec2-a93e-dc614fd45766-config\") pod \"6adb9c76-1002-4ec2-a93e-dc614fd45766\" (UID: \"6adb9c76-1002-4ec2-a93e-dc614fd45766\") " Dec 05 11:28:02 crc kubenswrapper[4728]: I1205 11:28:02.781494 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-84fxc\" (UniqueName: \"kubernetes.io/projected/ba33afa4-1aa3-408d-ba8c-c61f33aaa067-kube-api-access-84fxc\") pod \"ba33afa4-1aa3-408d-ba8c-c61f33aaa067\" (UID: \"ba33afa4-1aa3-408d-ba8c-c61f33aaa067\") " Dec 05 11:28:02 crc kubenswrapper[4728]: I1205 11:28:02.782120 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kztmh\" (UniqueName: \"kubernetes.io/projected/6adb9c76-1002-4ec2-a93e-dc614fd45766-kube-api-access-kztmh\") pod \"6adb9c76-1002-4ec2-a93e-dc614fd45766\" (UID: \"6adb9c76-1002-4ec2-a93e-dc614fd45766\") " Dec 05 11:28:02 crc kubenswrapper[4728]: I1205 11:28:02.782164 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ba33afa4-1aa3-408d-ba8c-c61f33aaa067-config\") pod \"ba33afa4-1aa3-408d-ba8c-c61f33aaa067\" (UID: \"ba33afa4-1aa3-408d-ba8c-c61f33aaa067\") " Dec 05 11:28:02 crc kubenswrapper[4728]: I1205 11:28:02.782473 4728 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6adb9c76-1002-4ec2-a93e-dc614fd45766-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 11:28:02 crc kubenswrapper[4728]: I1205 11:28:02.782500 4728 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ba33afa4-1aa3-408d-ba8c-c61f33aaa067-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 11:28:02 crc kubenswrapper[4728]: 
I1205 11:28:02.781872 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6adb9c76-1002-4ec2-a93e-dc614fd45766-config" (OuterVolumeSpecName: "config") pod "6adb9c76-1002-4ec2-a93e-dc614fd45766" (UID: "6adb9c76-1002-4ec2-a93e-dc614fd45766"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:28:02 crc kubenswrapper[4728]: I1205 11:28:02.782751 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ba33afa4-1aa3-408d-ba8c-c61f33aaa067-config" (OuterVolumeSpecName: "config") pod "ba33afa4-1aa3-408d-ba8c-c61f33aaa067" (UID: "ba33afa4-1aa3-408d-ba8c-c61f33aaa067"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:28:02 crc kubenswrapper[4728]: I1205 11:28:02.788342 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6adb9c76-1002-4ec2-a93e-dc614fd45766-kube-api-access-kztmh" (OuterVolumeSpecName: "kube-api-access-kztmh") pod "6adb9c76-1002-4ec2-a93e-dc614fd45766" (UID: "6adb9c76-1002-4ec2-a93e-dc614fd45766"). InnerVolumeSpecName "kube-api-access-kztmh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:28:02 crc kubenswrapper[4728]: I1205 11:28:02.788518 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba33afa4-1aa3-408d-ba8c-c61f33aaa067-kube-api-access-84fxc" (OuterVolumeSpecName: "kube-api-access-84fxc") pod "ba33afa4-1aa3-408d-ba8c-c61f33aaa067" (UID: "ba33afa4-1aa3-408d-ba8c-c61f33aaa067"). InnerVolumeSpecName "kube-api-access-84fxc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:28:02 crc kubenswrapper[4728]: I1205 11:28:02.883556 4728 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6adb9c76-1002-4ec2-a93e-dc614fd45766-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:28:02 crc kubenswrapper[4728]: I1205 11:28:02.883593 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-84fxc\" (UniqueName: \"kubernetes.io/projected/ba33afa4-1aa3-408d-ba8c-c61f33aaa067-kube-api-access-84fxc\") on node \"crc\" DevicePath \"\"" Dec 05 11:28:02 crc kubenswrapper[4728]: I1205 11:28:02.883603 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kztmh\" (UniqueName: \"kubernetes.io/projected/6adb9c76-1002-4ec2-a93e-dc614fd45766-kube-api-access-kztmh\") on node \"crc\" DevicePath \"\"" Dec 05 11:28:02 crc kubenswrapper[4728]: I1205 11:28:02.883614 4728 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ba33afa4-1aa3-408d-ba8c-c61f33aaa067-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:28:03 crc kubenswrapper[4728]: E1205 11:28:03.014125 4728 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod08dfc6f9_aba5_4869_bdd3_7e3e33754318.slice/crio-657b0aea4e5d6f756da0fc1539007ee88c144539a46869252361ca6e1375ca25.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod08dfc6f9_aba5_4869_bdd3_7e3e33754318.slice/crio-conmon-657b0aea4e5d6f756da0fc1539007ee88c144539a46869252361ca6e1375ca25.scope\": RecentStats: unable to find data in memory cache]" Dec 05 11:28:03 crc kubenswrapper[4728]: I1205 11:28:03.126984 4728 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-mmkxw"] Dec 05 11:28:03 crc kubenswrapper[4728]: I1205 11:28:03.132311 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-mmkxw"] Dec 05 11:28:03 crc kubenswrapper[4728]: I1205 11:28:03.162397 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-hsbkd"] Dec 05 11:28:03 crc kubenswrapper[4728]: I1205 11:28:03.168317 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-hsbkd"] Dec 05 11:28:03 crc kubenswrapper[4728]: I1205 11:28:03.808627 4728 generic.go:334] "Generic (PLEG): container finished" podID="08dfc6f9-aba5-4869-bdd3-7e3e33754318" containerID="657b0aea4e5d6f756da0fc1539007ee88c144539a46869252361ca6e1375ca25" exitCode=0 Dec 05 11:28:03 crc kubenswrapper[4728]: I1205 11:28:03.808706 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"08dfc6f9-aba5-4869-bdd3-7e3e33754318","Type":"ContainerDied","Data":"657b0aea4e5d6f756da0fc1539007ee88c144539a46869252361ca6e1375ca25"} Dec 05 11:28:03 crc kubenswrapper[4728]: I1205 11:28:03.812841 4728 generic.go:334] "Generic (PLEG): container finished" podID="d9cada47-64db-4c9c-8598-917e4099a8a6" containerID="1b0a2ed8fce2a587553a927c77053b0631efcf26f0b9efe9fbef338df713fcb5" exitCode=0 Dec 05 11:28:03 crc kubenswrapper[4728]: I1205 11:28:03.812920 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"d9cada47-64db-4c9c-8598-917e4099a8a6","Type":"ContainerDied","Data":"1b0a2ed8fce2a587553a927c77053b0631efcf26f0b9efe9fbef338df713fcb5"} Dec 05 11:28:03 crc kubenswrapper[4728]: I1205 11:28:03.958194 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-5shht"] Dec 05 11:28:04 crc kubenswrapper[4728]: I1205 11:28:04.020007 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-fqksc"] Dec 05 11:28:04 crc kubenswrapper[4728]: I1205 11:28:04.032466 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-mc7sj"] Dec 05 11:28:04 crc kubenswrapper[4728]: W1205 11:28:04.037079 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc3204df5_4f1c_4761_a5d6_38b09d27821b.slice/crio-4c2a72bad9ddbdab24278be2b3e9283300afaeee9ac745e473f6eb282dff650c WatchSource:0}: Error finding container 4c2a72bad9ddbdab24278be2b3e9283300afaeee9ac745e473f6eb282dff650c: Status 404 returned error can't find the container with id 4c2a72bad9ddbdab24278be2b3e9283300afaeee9ac745e473f6eb282dff650c Dec 05 11:28:04 crc kubenswrapper[4728]: W1205 11:28:04.037631 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podafc4bd20_529c_442f_a1a5_ed1b6fa92717.slice/crio-d820b1da3b0c42bc8986bcbcd591cf93eac65fbdb23d0e760f12e21206bcfd8f WatchSource:0}: Error finding container d820b1da3b0c42bc8986bcbcd591cf93eac65fbdb23d0e760f12e21206bcfd8f: Status 404 returned error can't find the container with id d820b1da3b0c42bc8986bcbcd591cf93eac65fbdb23d0e760f12e21206bcfd8f Dec 05 11:28:04 crc kubenswrapper[4728]: I1205 11:28:04.371055 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6adb9c76-1002-4ec2-a93e-dc614fd45766" path="/var/lib/kubelet/pods/6adb9c76-1002-4ec2-a93e-dc614fd45766/volumes" Dec 05 11:28:04 crc kubenswrapper[4728]: I1205 
11:28:04.372056 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ba33afa4-1aa3-408d-ba8c-c61f33aaa067" path="/var/lib/kubelet/pods/ba33afa4-1aa3-408d-ba8c-c61f33aaa067/volumes" Dec 05 11:28:04 crc kubenswrapper[4728]: I1205 11:28:04.827271 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"d9cada47-64db-4c9c-8598-917e4099a8a6","Type":"ContainerStarted","Data":"611c0e7346313da3d72bf83eaf0182052848d6ba7050e3463b418431e73c6f96"} Dec 05 11:28:04 crc kubenswrapper[4728]: I1205 11:28:04.831232 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-5shht" event={"ID":"7e189ef3-6dab-4ce4-9cc3-b9bd409342ba","Type":"ContainerStarted","Data":"d5fa935c6b0fae137db73f36b1030f3f0d2dfbfd89868bf94d53760028e4ce5f"} Dec 05 11:28:04 crc kubenswrapper[4728]: I1205 11:28:04.833495 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-74pwl" event={"ID":"ed80e7c1-b5a1-4606-b110-5d205dd122b4","Type":"ContainerStarted","Data":"050ecaf365087916ec61899241ceae512609599cfe445418fe4bd50152d35878"} Dec 05 11:28:04 crc kubenswrapper[4728]: I1205 11:28:04.833641 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-74pwl" Dec 05 11:28:04 crc kubenswrapper[4728]: I1205 11:28:04.835911 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-fqksc" event={"ID":"c3204df5-4f1c-4761-a5d6-38b09d27821b","Type":"ContainerStarted","Data":"4c2a72bad9ddbdab24278be2b3e9283300afaeee9ac745e473f6eb282dff650c"} Dec 05 11:28:04 crc kubenswrapper[4728]: I1205 11:28:04.838416 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"de9c5f8c-fb05-44a5-804d-1f8f2129da92","Type":"ContainerStarted","Data":"103933ac5c93e916f3532ffc8043fbd753267f6645864e2a02e09e29fc2298fd"} Dec 05 11:28:04 crc kubenswrapper[4728]: I1205 11:28:04.839773 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"269bbc54-5980-4de2-ac45-d1d7ff6335e9","Type":"ContainerStarted","Data":"595c2862de70d6b821b8a817b6ff6af1a3e4db77ce2a01acebc51974845667db"} Dec 05 11:28:04 crc kubenswrapper[4728]: I1205 11:28:04.841137 4728 generic.go:334] "Generic (PLEG): container finished" podID="afc4bd20-529c-442f-a1a5-ed1b6fa92717" containerID="30aa2e03cbdca6f39887cf9186d8515fc6067d522e3457211ac275c6bfa4234b" exitCode=0 Dec 05 11:28:04 crc kubenswrapper[4728]: I1205 11:28:04.841177 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-mc7sj" event={"ID":"afc4bd20-529c-442f-a1a5-ed1b6fa92717","Type":"ContainerDied","Data":"30aa2e03cbdca6f39887cf9186d8515fc6067d522e3457211ac275c6bfa4234b"} Dec 05 11:28:04 crc kubenswrapper[4728]: I1205 11:28:04.841193 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-mc7sj" event={"ID":"afc4bd20-529c-442f-a1a5-ed1b6fa92717","Type":"ContainerStarted","Data":"d820b1da3b0c42bc8986bcbcd591cf93eac65fbdb23d0e760f12e21206bcfd8f"} Dec 05 11:28:04 crc kubenswrapper[4728]: I1205 11:28:04.843413 4728 generic.go:334] "Generic (PLEG): container finished" podID="a087e318-da4d-49e0-826e-198c5afc0a15" containerID="136a2269371977c36ef71750ce7818c00fa154b5c91ee8af631e4ac47d77437e" exitCode=0 Dec 05 11:28:04 crc kubenswrapper[4728]: I1205 11:28:04.843486 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-csgtz" 
event={"ID":"a087e318-da4d-49e0-826e-198c5afc0a15","Type":"ContainerDied","Data":"136a2269371977c36ef71750ce7818c00fa154b5c91ee8af631e4ac47d77437e"} Dec 05 11:28:04 crc kubenswrapper[4728]: I1205 11:28:04.851369 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"08dfc6f9-aba5-4869-bdd3-7e3e33754318","Type":"ContainerStarted","Data":"f4b9eab53503cbafc86fb835a08bb871f7c13e14453814a47304ec5578d3a69a"} Dec 05 11:28:04 crc kubenswrapper[4728]: I1205 11:28:04.854692 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=8.521608346 podStartE2EDuration="34.854675409s" podCreationTimestamp="2025-12-05 11:27:30 +0000 UTC" firstStartedPulling="2025-12-05 11:27:32.586165183 +0000 UTC m=+1186.728287876" lastFinishedPulling="2025-12-05 11:27:58.919232246 +0000 UTC m=+1213.061354939" observedRunningTime="2025-12-05 11:28:04.851094772 +0000 UTC m=+1218.993217495" watchObservedRunningTime="2025-12-05 11:28:04.854675409 +0000 UTC m=+1218.996798102" Dec 05 11:28:04 crc kubenswrapper[4728]: I1205 11:28:04.918455 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-74pwl" podStartSLOduration=22.902229008 podStartE2EDuration="26.918433417s" podCreationTimestamp="2025-12-05 11:27:38 +0000 UTC" firstStartedPulling="2025-12-05 11:27:59.533349843 +0000 UTC m=+1213.675472526" lastFinishedPulling="2025-12-05 11:28:03.549554242 +0000 UTC m=+1217.691676935" observedRunningTime="2025-12-05 11:28:04.910667958 +0000 UTC m=+1219.052790671" watchObservedRunningTime="2025-12-05 11:28:04.918433417 +0000 UTC m=+1219.060556110" Dec 05 11:28:04 crc kubenswrapper[4728]: I1205 11:28:04.940201 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=8.205659556 podStartE2EDuration="35.940180213s" podCreationTimestamp="2025-12-05 11:27:29 +0000 UTC" firstStartedPulling="2025-12-05 11:27:31.209547168 +0000 UTC m=+1185.351669851" lastFinishedPulling="2025-12-05 11:27:58.944067815 +0000 UTC m=+1213.086190508" observedRunningTime="2025-12-05 11:28:04.927054819 +0000 UTC m=+1219.069177512" watchObservedRunningTime="2025-12-05 11:28:04.940180213 +0000 UTC m=+1219.082302916" Dec 05 11:28:05 crc kubenswrapper[4728]: I1205 11:28:05.865264 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-csgtz" event={"ID":"a087e318-da4d-49e0-826e-198c5afc0a15","Type":"ContainerStarted","Data":"140adc372aff237e7a37fc41989d8a5b8d458d8f558fab0561c17b261650b010"} Dec 05 11:28:05 crc kubenswrapper[4728]: I1205 11:28:05.866063 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-csgtz" Dec 05 11:28:05 crc kubenswrapper[4728]: I1205 11:28:05.866077 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-csgtz" event={"ID":"a087e318-da4d-49e0-826e-198c5afc0a15","Type":"ContainerStarted","Data":"73885863bb161877f491dcf1086253110f5ef51415daf3ca10a8c6601b1049dd"} Dec 05 11:28:05 crc kubenswrapper[4728]: I1205 11:28:05.866093 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-csgtz" Dec 05 11:28:05 crc kubenswrapper[4728]: I1205 11:28:05.867298 4728 generic.go:334] "Generic (PLEG): container finished" podID="c3204df5-4f1c-4761-a5d6-38b09d27821b" containerID="ddc748730de7d3e6670967eeb8de48ec58cea8c546f23933905ed5cdf4fece03" exitCode=0 Dec 05 11:28:05 
crc kubenswrapper[4728]: I1205 11:28:05.867368 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-fqksc" event={"ID":"c3204df5-4f1c-4761-a5d6-38b09d27821b","Type":"ContainerDied","Data":"ddc748730de7d3e6670967eeb8de48ec58cea8c546f23933905ed5cdf4fece03"} Dec 05 11:28:05 crc kubenswrapper[4728]: I1205 11:28:05.870352 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-mc7sj" event={"ID":"afc4bd20-529c-442f-a1a5-ed1b6fa92717","Type":"ContainerStarted","Data":"9fdf795d26164881c4d70781394ac0a1d8ffb9f36c0c863c3dfd5f302f30abfb"} Dec 05 11:28:05 crc kubenswrapper[4728]: I1205 11:28:05.870665 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-86db49b7ff-mc7sj" Dec 05 11:28:05 crc kubenswrapper[4728]: I1205 11:28:05.891281 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-csgtz" podStartSLOduration=24.449444102 podStartE2EDuration="27.891261175s" podCreationTimestamp="2025-12-05 11:27:38 +0000 UTC" firstStartedPulling="2025-12-05 11:28:00.083896906 +0000 UTC m=+1214.226019589" lastFinishedPulling="2025-12-05 11:28:03.525713969 +0000 UTC m=+1217.667836662" observedRunningTime="2025-12-05 11:28:05.884419091 +0000 UTC m=+1220.026541804" watchObservedRunningTime="2025-12-05 11:28:05.891261175 +0000 UTC m=+1220.033383868" Dec 05 11:28:05 crc kubenswrapper[4728]: I1205 11:28:05.913943 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-86db49b7ff-mc7sj" podStartSLOduration=5.502851323 podStartE2EDuration="5.913927306s" podCreationTimestamp="2025-12-05 11:28:00 +0000 UTC" firstStartedPulling="2025-12-05 11:28:04.039891772 +0000 UTC m=+1218.182014465" lastFinishedPulling="2025-12-05 11:28:04.450967755 +0000 UTC m=+1218.593090448" observedRunningTime="2025-12-05 11:28:05.908854119 +0000 UTC m=+1220.050976832" watchObservedRunningTime="2025-12-05 11:28:05.913927306 +0000 UTC m=+1220.056050009" Dec 05 11:28:07 crc kubenswrapper[4728]: E1205 11:28:07.104859 4728 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.146:40038->38.102.83.146:35449: write tcp 38.102.83.146:40038->38.102.83.146:35449: write: broken pipe Dec 05 11:28:07 crc kubenswrapper[4728]: I1205 11:28:07.886253 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-fqksc" event={"ID":"c3204df5-4f1c-4761-a5d6-38b09d27821b","Type":"ContainerStarted","Data":"27d470dcc7679014f2b45cedab1436309956d85f523da34eb67469b51e0bc713"} Dec 05 11:28:07 crc kubenswrapper[4728]: I1205 11:28:07.886756 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7fd796d7df-fqksc" Dec 05 11:28:07 crc kubenswrapper[4728]: I1205 11:28:07.888294 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"de9c5f8c-fb05-44a5-804d-1f8f2129da92","Type":"ContainerStarted","Data":"1a8abbf7d2df0000617271b8fbbd086f91fc923fdf45a3e7b8d5abf2d9d4a0a5"} Dec 05 11:28:07 crc kubenswrapper[4728]: I1205 11:28:07.890530 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"269bbc54-5980-4de2-ac45-d1d7ff6335e9","Type":"ContainerStarted","Data":"206cffd8035880a7ebf252e21038d15e482016a753fea6f7ee7c74492c204b99"} Dec 05 11:28:07 crc kubenswrapper[4728]: I1205 11:28:07.899395 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ovn-controller-metrics-5shht" event={"ID":"7e189ef3-6dab-4ce4-9cc3-b9bd409342ba","Type":"ContainerStarted","Data":"3380c6147e30fe6c8112d16a090c88ee5279734f166d3575f2d5faf5fae95612"} Dec 05 11:28:07 crc kubenswrapper[4728]: I1205 11:28:07.920673 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7fd796d7df-fqksc" podStartSLOduration=7.404075692 podStartE2EDuration="7.920648239s" podCreationTimestamp="2025-12-05 11:28:00 +0000 UTC" firstStartedPulling="2025-12-05 11:28:04.039264485 +0000 UTC m=+1218.181387178" lastFinishedPulling="2025-12-05 11:28:04.555837032 +0000 UTC m=+1218.697959725" observedRunningTime="2025-12-05 11:28:07.910514636 +0000 UTC m=+1222.052637349" watchObservedRunningTime="2025-12-05 11:28:07.920648239 +0000 UTC m=+1222.062770972" Dec 05 11:28:07 crc kubenswrapper[4728]: I1205 11:28:07.937980 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=23.811585574 podStartE2EDuration="31.937957556s" podCreationTimestamp="2025-12-05 11:27:36 +0000 UTC" firstStartedPulling="2025-12-05 11:27:58.929375139 +0000 UTC m=+1213.071497832" lastFinishedPulling="2025-12-05 11:28:07.055747121 +0000 UTC m=+1221.197869814" observedRunningTime="2025-12-05 11:28:07.932768736 +0000 UTC m=+1222.074891499" watchObservedRunningTime="2025-12-05 11:28:07.937957556 +0000 UTC m=+1222.080080249" Dec 05 11:28:07 crc kubenswrapper[4728]: I1205 11:28:07.962068 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-5shht" podStartSLOduration=4.916699421 podStartE2EDuration="7.962050735s" podCreationTimestamp="2025-12-05 11:28:00 +0000 UTC" firstStartedPulling="2025-12-05 11:28:03.968648321 +0000 UTC m=+1218.110771014" lastFinishedPulling="2025-12-05 11:28:07.013999635 +0000 UTC m=+1221.156122328" observedRunningTime="2025-12-05 11:28:07.959864926 +0000 UTC m=+1222.101987709" watchObservedRunningTime="2025-12-05 11:28:07.962050735 +0000 UTC m=+1222.104173428" Dec 05 11:28:07 crc kubenswrapper[4728]: I1205 11:28:07.991205 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=21.49017918 podStartE2EDuration="28.991184451s" podCreationTimestamp="2025-12-05 11:27:39 +0000 UTC" firstStartedPulling="2025-12-05 11:27:59.511065192 +0000 UTC m=+1213.653187885" lastFinishedPulling="2025-12-05 11:28:07.012070463 +0000 UTC m=+1221.154193156" observedRunningTime="2025-12-05 11:28:07.98447404 +0000 UTC m=+1222.126596743" watchObservedRunningTime="2025-12-05 11:28:07.991184451 +0000 UTC m=+1222.133307144" Dec 05 11:28:08 crc kubenswrapper[4728]: I1205 11:28:08.135251 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Dec 05 11:28:08 crc kubenswrapper[4728]: I1205 11:28:08.184377 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Dec 05 11:28:08 crc kubenswrapper[4728]: I1205 11:28:08.256235 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Dec 05 11:28:08 crc kubenswrapper[4728]: I1205 11:28:08.256296 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Dec 05 11:28:08 crc kubenswrapper[4728]: I1205 11:28:08.292850 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Dec 05 11:28:08 crc 
kubenswrapper[4728]: I1205 11:28:08.906564 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Dec 05 11:28:08 crc kubenswrapper[4728]: I1205 11:28:08.948681 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Dec 05 11:28:08 crc kubenswrapper[4728]: I1205 11:28:08.955665 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Dec 05 11:28:09 crc kubenswrapper[4728]: I1205 11:28:09.214614 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Dec 05 11:28:09 crc kubenswrapper[4728]: I1205 11:28:09.215855 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Dec 05 11:28:09 crc kubenswrapper[4728]: I1205 11:28:09.219099 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Dec 05 11:28:09 crc kubenswrapper[4728]: I1205 11:28:09.219454 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-bw7q9" Dec 05 11:28:09 crc kubenswrapper[4728]: I1205 11:28:09.219689 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Dec 05 11:28:09 crc kubenswrapper[4728]: I1205 11:28:09.219696 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Dec 05 11:28:09 crc kubenswrapper[4728]: I1205 11:28:09.246622 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 05 11:28:09 crc kubenswrapper[4728]: I1205 11:28:09.307502 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/de655c8f-ba39-41bb-a5c0-c3195d4999ea-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"de655c8f-ba39-41bb-a5c0-c3195d4999ea\") " pod="openstack/ovn-northd-0" Dec 05 11:28:09 crc kubenswrapper[4728]: I1205 11:28:09.307610 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/de655c8f-ba39-41bb-a5c0-c3195d4999ea-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"de655c8f-ba39-41bb-a5c0-c3195d4999ea\") " pod="openstack/ovn-northd-0" Dec 05 11:28:09 crc kubenswrapper[4728]: I1205 11:28:09.307649 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/de655c8f-ba39-41bb-a5c0-c3195d4999ea-config\") pod \"ovn-northd-0\" (UID: \"de655c8f-ba39-41bb-a5c0-c3195d4999ea\") " pod="openstack/ovn-northd-0" Dec 05 11:28:09 crc kubenswrapper[4728]: I1205 11:28:09.307674 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de655c8f-ba39-41bb-a5c0-c3195d4999ea-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"de655c8f-ba39-41bb-a5c0-c3195d4999ea\") " pod="openstack/ovn-northd-0" Dec 05 11:28:09 crc kubenswrapper[4728]: I1205 11:28:09.307700 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tp7dp\" (UniqueName: \"kubernetes.io/projected/de655c8f-ba39-41bb-a5c0-c3195d4999ea-kube-api-access-tp7dp\") pod \"ovn-northd-0\" (UID: \"de655c8f-ba39-41bb-a5c0-c3195d4999ea\") " pod="openstack/ovn-northd-0" Dec 05 11:28:09 crc kubenswrapper[4728]: I1205 
11:28:09.307737 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/de655c8f-ba39-41bb-a5c0-c3195d4999ea-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"de655c8f-ba39-41bb-a5c0-c3195d4999ea\") " pod="openstack/ovn-northd-0" Dec 05 11:28:09 crc kubenswrapper[4728]: I1205 11:28:09.307854 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/de655c8f-ba39-41bb-a5c0-c3195d4999ea-scripts\") pod \"ovn-northd-0\" (UID: \"de655c8f-ba39-41bb-a5c0-c3195d4999ea\") " pod="openstack/ovn-northd-0" Dec 05 11:28:09 crc kubenswrapper[4728]: I1205 11:28:09.409543 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/de655c8f-ba39-41bb-a5c0-c3195d4999ea-config\") pod \"ovn-northd-0\" (UID: \"de655c8f-ba39-41bb-a5c0-c3195d4999ea\") " pod="openstack/ovn-northd-0" Dec 05 11:28:09 crc kubenswrapper[4728]: I1205 11:28:09.409588 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de655c8f-ba39-41bb-a5c0-c3195d4999ea-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"de655c8f-ba39-41bb-a5c0-c3195d4999ea\") " pod="openstack/ovn-northd-0" Dec 05 11:28:09 crc kubenswrapper[4728]: I1205 11:28:09.409605 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tp7dp\" (UniqueName: \"kubernetes.io/projected/de655c8f-ba39-41bb-a5c0-c3195d4999ea-kube-api-access-tp7dp\") pod \"ovn-northd-0\" (UID: \"de655c8f-ba39-41bb-a5c0-c3195d4999ea\") " pod="openstack/ovn-northd-0" Dec 05 11:28:09 crc kubenswrapper[4728]: I1205 11:28:09.409635 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/de655c8f-ba39-41bb-a5c0-c3195d4999ea-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"de655c8f-ba39-41bb-a5c0-c3195d4999ea\") " pod="openstack/ovn-northd-0" Dec 05 11:28:09 crc kubenswrapper[4728]: I1205 11:28:09.409665 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/de655c8f-ba39-41bb-a5c0-c3195d4999ea-scripts\") pod \"ovn-northd-0\" (UID: \"de655c8f-ba39-41bb-a5c0-c3195d4999ea\") " pod="openstack/ovn-northd-0" Dec 05 11:28:09 crc kubenswrapper[4728]: I1205 11:28:09.409722 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/de655c8f-ba39-41bb-a5c0-c3195d4999ea-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"de655c8f-ba39-41bb-a5c0-c3195d4999ea\") " pod="openstack/ovn-northd-0" Dec 05 11:28:09 crc kubenswrapper[4728]: I1205 11:28:09.409769 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/de655c8f-ba39-41bb-a5c0-c3195d4999ea-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"de655c8f-ba39-41bb-a5c0-c3195d4999ea\") " pod="openstack/ovn-northd-0" Dec 05 11:28:09 crc kubenswrapper[4728]: I1205 11:28:09.410574 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/de655c8f-ba39-41bb-a5c0-c3195d4999ea-config\") pod \"ovn-northd-0\" (UID: \"de655c8f-ba39-41bb-a5c0-c3195d4999ea\") " pod="openstack/ovn-northd-0" Dec 05 
11:28:09 crc kubenswrapper[4728]: I1205 11:28:09.410635 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/de655c8f-ba39-41bb-a5c0-c3195d4999ea-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"de655c8f-ba39-41bb-a5c0-c3195d4999ea\") " pod="openstack/ovn-northd-0" Dec 05 11:28:09 crc kubenswrapper[4728]: I1205 11:28:09.410877 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/de655c8f-ba39-41bb-a5c0-c3195d4999ea-scripts\") pod \"ovn-northd-0\" (UID: \"de655c8f-ba39-41bb-a5c0-c3195d4999ea\") " pod="openstack/ovn-northd-0" Dec 05 11:28:09 crc kubenswrapper[4728]: I1205 11:28:09.416443 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/de655c8f-ba39-41bb-a5c0-c3195d4999ea-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"de655c8f-ba39-41bb-a5c0-c3195d4999ea\") " pod="openstack/ovn-northd-0" Dec 05 11:28:09 crc kubenswrapper[4728]: I1205 11:28:09.416559 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/de655c8f-ba39-41bb-a5c0-c3195d4999ea-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"de655c8f-ba39-41bb-a5c0-c3195d4999ea\") " pod="openstack/ovn-northd-0" Dec 05 11:28:09 crc kubenswrapper[4728]: I1205 11:28:09.417211 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de655c8f-ba39-41bb-a5c0-c3195d4999ea-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"de655c8f-ba39-41bb-a5c0-c3195d4999ea\") " pod="openstack/ovn-northd-0" Dec 05 11:28:09 crc kubenswrapper[4728]: I1205 11:28:09.424876 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tp7dp\" (UniqueName: \"kubernetes.io/projected/de655c8f-ba39-41bb-a5c0-c3195d4999ea-kube-api-access-tp7dp\") pod \"ovn-northd-0\" (UID: \"de655c8f-ba39-41bb-a5c0-c3195d4999ea\") " pod="openstack/ovn-northd-0" Dec 05 11:28:09 crc kubenswrapper[4728]: I1205 11:28:09.540998 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Dec 05 11:28:09 crc kubenswrapper[4728]: I1205 11:28:09.986585 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 05 11:28:10 crc kubenswrapper[4728]: I1205 11:28:10.617832 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Dec 05 11:28:10 crc kubenswrapper[4728]: I1205 11:28:10.618098 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Dec 05 11:28:10 crc kubenswrapper[4728]: I1205 11:28:10.704200 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Dec 05 11:28:10 crc kubenswrapper[4728]: I1205 11:28:10.919772 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"de655c8f-ba39-41bb-a5c0-c3195d4999ea","Type":"ContainerStarted","Data":"efb0a3de42bb8acd0e7a69e1eaa5da11e36fc2bc5c0e4bcb2e51207d99c78dce"} Dec 05 11:28:10 crc kubenswrapper[4728]: I1205 11:28:10.992919 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Dec 05 11:28:11 crc kubenswrapper[4728]: I1205 11:28:11.403034 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-86db49b7ff-mc7sj" Dec 05 11:28:11 crc kubenswrapper[4728]: I1205 11:28:11.458235 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-fqksc"] Dec 05 11:28:11 crc kubenswrapper[4728]: I1205 11:28:11.458686 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7fd796d7df-fqksc" podUID="c3204df5-4f1c-4761-a5d6-38b09d27821b" containerName="dnsmasq-dns" containerID="cri-o://27d470dcc7679014f2b45cedab1436309956d85f523da34eb67469b51e0bc713" gracePeriod=10 Dec 05 11:28:11 crc kubenswrapper[4728]: I1205 11:28:11.935283 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Dec 05 11:28:11 crc kubenswrapper[4728]: I1205 11:28:11.935746 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Dec 05 11:28:11 crc kubenswrapper[4728]: I1205 11:28:11.942637 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-a7df-account-create-update-jvkxk"] Dec 05 11:28:11 crc kubenswrapper[4728]: I1205 11:28:11.943991 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-a7df-account-create-update-jvkxk" Dec 05 11:28:11 crc kubenswrapper[4728]: I1205 11:28:11.944450 4728 generic.go:334] "Generic (PLEG): container finished" podID="c3204df5-4f1c-4761-a5d6-38b09d27821b" containerID="27d470dcc7679014f2b45cedab1436309956d85f523da34eb67469b51e0bc713" exitCode=0 Dec 05 11:28:11 crc kubenswrapper[4728]: I1205 11:28:11.944535 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-fqksc" event={"ID":"c3204df5-4f1c-4761-a5d6-38b09d27821b","Type":"ContainerDied","Data":"27d470dcc7679014f2b45cedab1436309956d85f523da34eb67469b51e0bc713"} Dec 05 11:28:11 crc kubenswrapper[4728]: I1205 11:28:11.944558 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-fqksc" event={"ID":"c3204df5-4f1c-4761-a5d6-38b09d27821b","Type":"ContainerDied","Data":"4c2a72bad9ddbdab24278be2b3e9283300afaeee9ac745e473f6eb282dff650c"} Dec 05 11:28:11 crc kubenswrapper[4728]: I1205 11:28:11.944571 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4c2a72bad9ddbdab24278be2b3e9283300afaeee9ac745e473f6eb282dff650c" Dec 05 11:28:11 crc kubenswrapper[4728]: I1205 11:28:11.947006 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Dec 05 11:28:11 crc kubenswrapper[4728]: I1205 11:28:11.950292 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"de655c8f-ba39-41bb-a5c0-c3195d4999ea","Type":"ContainerStarted","Data":"216b625bb987c1ae13626e6ab670500ef223191417d7cdd1a529058b06b737ae"} Dec 05 11:28:11 crc kubenswrapper[4728]: I1205 11:28:11.950346 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"de655c8f-ba39-41bb-a5c0-c3195d4999ea","Type":"ContainerStarted","Data":"031bbc4fcf05eab5cc9cdb36facca1872888ea447045e99c4a712277612e2326"} Dec 05 11:28:11 crc kubenswrapper[4728]: I1205 11:28:11.951166 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Dec 05 11:28:11 crc kubenswrapper[4728]: I1205 11:28:11.961400 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"efbf5688-8330-4166-a93b-03dcf8ed578d","Type":"ContainerStarted","Data":"fae6375b96a420906476b0e61913b5242632e8dec3ade318b05ba1572e3aa496"} Dec 05 11:28:11 crc kubenswrapper[4728]: I1205 11:28:11.971519 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-a7df-account-create-update-jvkxk"] Dec 05 11:28:11 crc kubenswrapper[4728]: I1205 11:28:11.988013 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=1.8279352709999999 podStartE2EDuration="2.987939275s" podCreationTimestamp="2025-12-05 11:28:09 +0000 UTC" firstStartedPulling="2025-12-05 11:28:09.996613328 +0000 UTC m=+1224.138736021" lastFinishedPulling="2025-12-05 11:28:11.156617332 +0000 UTC m=+1225.298740025" observedRunningTime="2025-12-05 11:28:11.977821682 +0000 UTC m=+1226.119944385" watchObservedRunningTime="2025-12-05 11:28:11.987939275 +0000 UTC m=+1226.130061968" Dec 05 11:28:12 crc kubenswrapper[4728]: I1205 11:28:12.005099 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-fqksc" Dec 05 11:28:12 crc kubenswrapper[4728]: I1205 11:28:12.012481 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=2.855273599 podStartE2EDuration="41.012456896s" podCreationTimestamp="2025-12-05 11:27:31 +0000 UTC" firstStartedPulling="2025-12-05 11:27:32.600416877 +0000 UTC m=+1186.742539580" lastFinishedPulling="2025-12-05 11:28:10.757600184 +0000 UTC m=+1224.899722877" observedRunningTime="2025-12-05 11:28:11.997997746 +0000 UTC m=+1226.140120459" watchObservedRunningTime="2025-12-05 11:28:12.012456896 +0000 UTC m=+1226.154579589" Dec 05 11:28:12 crc kubenswrapper[4728]: I1205 11:28:12.036220 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-zb2z4"] Dec 05 11:28:12 crc kubenswrapper[4728]: E1205 11:28:12.041492 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3204df5-4f1c-4761-a5d6-38b09d27821b" containerName="dnsmasq-dns" Dec 05 11:28:12 crc kubenswrapper[4728]: I1205 11:28:12.041532 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3204df5-4f1c-4761-a5d6-38b09d27821b" containerName="dnsmasq-dns" Dec 05 11:28:12 crc kubenswrapper[4728]: E1205 11:28:12.041570 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3204df5-4f1c-4761-a5d6-38b09d27821b" containerName="init" Dec 05 11:28:12 crc kubenswrapper[4728]: I1205 11:28:12.041580 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3204df5-4f1c-4761-a5d6-38b09d27821b" containerName="init" Dec 05 11:28:12 crc kubenswrapper[4728]: I1205 11:28:12.044238 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="c3204df5-4f1c-4761-a5d6-38b09d27821b" containerName="dnsmasq-dns" Dec 05 11:28:12 crc kubenswrapper[4728]: I1205 11:28:12.050168 4728 util.go:30] "No sandbox for pod can be found. 
Dec 05 11:28:12 crc kubenswrapper[4728]: I1205 11:28:12.059089 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0"
Dec 05 11:28:12 crc kubenswrapper[4728]: I1205 11:28:12.060462 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-zb2z4"]
Dec 05 11:28:12 crc kubenswrapper[4728]: I1205 11:28:12.064777 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6fkn8\" (UniqueName: \"kubernetes.io/projected/9f832fc9-dbbb-4512-9ae8-431e113ce808-kube-api-access-6fkn8\") pod \"keystone-db-create-zb2z4\" (UID: \"9f832fc9-dbbb-4512-9ae8-431e113ce808\") " pod="openstack/keystone-db-create-zb2z4"
Dec 05 11:28:12 crc kubenswrapper[4728]: I1205 11:28:12.064977 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9f832fc9-dbbb-4512-9ae8-431e113ce808-operator-scripts\") pod \"keystone-db-create-zb2z4\" (UID: \"9f832fc9-dbbb-4512-9ae8-431e113ce808\") " pod="openstack/keystone-db-create-zb2z4"
Dec 05 11:28:12 crc kubenswrapper[4728]: I1205 11:28:12.169035 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c3204df5-4f1c-4761-a5d6-38b09d27821b-ovsdbserver-nb\") pod \"c3204df5-4f1c-4761-a5d6-38b09d27821b\" (UID: \"c3204df5-4f1c-4761-a5d6-38b09d27821b\") "
Dec 05 11:28:12 crc kubenswrapper[4728]: I1205 11:28:12.169162 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c3204df5-4f1c-4761-a5d6-38b09d27821b-config\") pod \"c3204df5-4f1c-4761-a5d6-38b09d27821b\" (UID: \"c3204df5-4f1c-4761-a5d6-38b09d27821b\") "
Dec 05 11:28:12 crc kubenswrapper[4728]: I1205 11:28:12.169324 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qvshv\" (UniqueName: \"kubernetes.io/projected/c3204df5-4f1c-4761-a5d6-38b09d27821b-kube-api-access-qvshv\") pod \"c3204df5-4f1c-4761-a5d6-38b09d27821b\" (UID: \"c3204df5-4f1c-4761-a5d6-38b09d27821b\") "
Dec 05 11:28:12 crc kubenswrapper[4728]: I1205 11:28:12.169379 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c3204df5-4f1c-4761-a5d6-38b09d27821b-dns-svc\") pod \"c3204df5-4f1c-4761-a5d6-38b09d27821b\" (UID: \"c3204df5-4f1c-4761-a5d6-38b09d27821b\") "
Dec 05 11:28:12 crc kubenswrapper[4728]: I1205 11:28:12.172259 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6fkn8\" (UniqueName: \"kubernetes.io/projected/9f832fc9-dbbb-4512-9ae8-431e113ce808-kube-api-access-6fkn8\") pod \"keystone-db-create-zb2z4\" (UID: \"9f832fc9-dbbb-4512-9ae8-431e113ce808\") " pod="openstack/keystone-db-create-zb2z4"
Dec 05 11:28:12 crc kubenswrapper[4728]: I1205 11:28:12.172342 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9f832fc9-dbbb-4512-9ae8-431e113ce808-operator-scripts\") pod \"keystone-db-create-zb2z4\" (UID: \"9f832fc9-dbbb-4512-9ae8-431e113ce808\") " pod="openstack/keystone-db-create-zb2z4"
Dec 05 11:28:12 crc kubenswrapper[4728]: I1205 11:28:12.172497 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bgj6r\" (UniqueName: \"kubernetes.io/projected/816b032e-4ef3-46dc-a6bb-17610cf07073-kube-api-access-bgj6r\") pod \"keystone-a7df-account-create-update-jvkxk\" (UID: \"816b032e-4ef3-46dc-a6bb-17610cf07073\") " pod="openstack/keystone-a7df-account-create-update-jvkxk"
Dec 05 11:28:12 crc kubenswrapper[4728]: I1205 11:28:12.172895 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/816b032e-4ef3-46dc-a6bb-17610cf07073-operator-scripts\") pod \"keystone-a7df-account-create-update-jvkxk\" (UID: \"816b032e-4ef3-46dc-a6bb-17610cf07073\") " pod="openstack/keystone-a7df-account-create-update-jvkxk"
Dec 05 11:28:12 crc kubenswrapper[4728]: I1205 11:28:12.173730 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9f832fc9-dbbb-4512-9ae8-431e113ce808-operator-scripts\") pod \"keystone-db-create-zb2z4\" (UID: \"9f832fc9-dbbb-4512-9ae8-431e113ce808\") " pod="openstack/keystone-db-create-zb2z4"
Dec 05 11:28:12 crc kubenswrapper[4728]: I1205 11:28:12.189390 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c3204df5-4f1c-4761-a5d6-38b09d27821b-kube-api-access-qvshv" (OuterVolumeSpecName: "kube-api-access-qvshv") pod "c3204df5-4f1c-4761-a5d6-38b09d27821b" (UID: "c3204df5-4f1c-4761-a5d6-38b09d27821b"). InnerVolumeSpecName "kube-api-access-qvshv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:28:12 crc kubenswrapper[4728]: I1205 11:28:12.195650 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6fkn8\" (UniqueName: \"kubernetes.io/projected/9f832fc9-dbbb-4512-9ae8-431e113ce808-kube-api-access-6fkn8\") pod \"keystone-db-create-zb2z4\" (UID: \"9f832fc9-dbbb-4512-9ae8-431e113ce808\") " pod="openstack/keystone-db-create-zb2z4"
Dec 05 11:28:12 crc kubenswrapper[4728]: I1205 11:28:12.234174 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-z24h9"]
Dec 05 11:28:12 crc kubenswrapper[4728]: I1205 11:28:12.236777 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-z24h9"
Dec 05 11:28:12 crc kubenswrapper[4728]: I1205 11:28:12.249169 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-1b8d-account-create-update-fsdrh"]
Dec 05 11:28:12 crc kubenswrapper[4728]: I1205 11:28:12.250934 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-1b8d-account-create-update-fsdrh"
Dec 05 11:28:12 crc kubenswrapper[4728]: I1205 11:28:12.252933 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret"
Dec 05 11:28:12 crc kubenswrapper[4728]: I1205 11:28:12.255848 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0"
Dec 05 11:28:12 crc kubenswrapper[4728]: I1205 11:28:12.256944 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c3204df5-4f1c-4761-a5d6-38b09d27821b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c3204df5-4f1c-4761-a5d6-38b09d27821b" (UID: "c3204df5-4f1c-4761-a5d6-38b09d27821b"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:28:12 crc kubenswrapper[4728]: I1205 11:28:12.262825 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-z24h9"] Dec 05 11:28:12 crc kubenswrapper[4728]: I1205 11:28:12.266082 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c3204df5-4f1c-4761-a5d6-38b09d27821b-config" (OuterVolumeSpecName: "config") pod "c3204df5-4f1c-4761-a5d6-38b09d27821b" (UID: "c3204df5-4f1c-4761-a5d6-38b09d27821b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:28:12 crc kubenswrapper[4728]: I1205 11:28:12.268682 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c3204df5-4f1c-4761-a5d6-38b09d27821b-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c3204df5-4f1c-4761-a5d6-38b09d27821b" (UID: "c3204df5-4f1c-4761-a5d6-38b09d27821b"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:28:12 crc kubenswrapper[4728]: I1205 11:28:12.277628 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/816b032e-4ef3-46dc-a6bb-17610cf07073-operator-scripts\") pod \"keystone-a7df-account-create-update-jvkxk\" (UID: \"816b032e-4ef3-46dc-a6bb-17610cf07073\") " pod="openstack/keystone-a7df-account-create-update-jvkxk" Dec 05 11:28:12 crc kubenswrapper[4728]: I1205 11:28:12.277975 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2tksr\" (UniqueName: \"kubernetes.io/projected/dec7fcaf-6110-4102-8c09-3d79fe7763b4-kube-api-access-2tksr\") pod \"placement-1b8d-account-create-update-fsdrh\" (UID: \"dec7fcaf-6110-4102-8c09-3d79fe7763b4\") " pod="openstack/placement-1b8d-account-create-update-fsdrh" Dec 05 11:28:12 crc kubenswrapper[4728]: I1205 11:28:12.278232 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/12d05504-fe2a-4fc5-a9c9-33bc47aca64f-operator-scripts\") pod \"placement-db-create-z24h9\" (UID: \"12d05504-fe2a-4fc5-a9c9-33bc47aca64f\") " pod="openstack/placement-db-create-z24h9" Dec 05 11:28:12 crc kubenswrapper[4728]: I1205 11:28:12.278420 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bgj6r\" (UniqueName: \"kubernetes.io/projected/816b032e-4ef3-46dc-a6bb-17610cf07073-kube-api-access-bgj6r\") pod \"keystone-a7df-account-create-update-jvkxk\" (UID: \"816b032e-4ef3-46dc-a6bb-17610cf07073\") " pod="openstack/keystone-a7df-account-create-update-jvkxk" Dec 05 11:28:12 crc kubenswrapper[4728]: I1205 11:28:12.278625 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dec7fcaf-6110-4102-8c09-3d79fe7763b4-operator-scripts\") pod \"placement-1b8d-account-create-update-fsdrh\" (UID: \"dec7fcaf-6110-4102-8c09-3d79fe7763b4\") " pod="openstack/placement-1b8d-account-create-update-fsdrh" Dec 05 11:28:12 crc kubenswrapper[4728]: I1205 11:28:12.278853 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bkpml\" (UniqueName: \"kubernetes.io/projected/12d05504-fe2a-4fc5-a9c9-33bc47aca64f-kube-api-access-bkpml\") pod \"placement-db-create-z24h9\" (UID: 
\"12d05504-fe2a-4fc5-a9c9-33bc47aca64f\") " pod="openstack/placement-db-create-z24h9" Dec 05 11:28:12 crc kubenswrapper[4728]: I1205 11:28:12.279143 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qvshv\" (UniqueName: \"kubernetes.io/projected/c3204df5-4f1c-4761-a5d6-38b09d27821b-kube-api-access-qvshv\") on node \"crc\" DevicePath \"\"" Dec 05 11:28:12 crc kubenswrapper[4728]: I1205 11:28:12.279229 4728 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c3204df5-4f1c-4761-a5d6-38b09d27821b-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 11:28:12 crc kubenswrapper[4728]: I1205 11:28:12.279356 4728 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c3204df5-4f1c-4761-a5d6-38b09d27821b-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 11:28:12 crc kubenswrapper[4728]: I1205 11:28:12.279516 4728 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c3204df5-4f1c-4761-a5d6-38b09d27821b-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:28:12 crc kubenswrapper[4728]: I1205 11:28:12.279460 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/816b032e-4ef3-46dc-a6bb-17610cf07073-operator-scripts\") pod \"keystone-a7df-account-create-update-jvkxk\" (UID: \"816b032e-4ef3-46dc-a6bb-17610cf07073\") " pod="openstack/keystone-a7df-account-create-update-jvkxk" Dec 05 11:28:12 crc kubenswrapper[4728]: I1205 11:28:12.293705 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-1b8d-account-create-update-fsdrh"] Dec 05 11:28:12 crc kubenswrapper[4728]: I1205 11:28:12.298344 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bgj6r\" (UniqueName: \"kubernetes.io/projected/816b032e-4ef3-46dc-a6bb-17610cf07073-kube-api-access-bgj6r\") pod \"keystone-a7df-account-create-update-jvkxk\" (UID: \"816b032e-4ef3-46dc-a6bb-17610cf07073\") " pod="openstack/keystone-a7df-account-create-update-jvkxk" Dec 05 11:28:12 crc kubenswrapper[4728]: I1205 11:28:12.341410 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-a7df-account-create-update-jvkxk" Dec 05 11:28:12 crc kubenswrapper[4728]: I1205 11:28:12.368522 4728 util.go:30] "No sandbox for pod can be found. 
Dec 05 11:28:12 crc kubenswrapper[4728]: I1205 11:28:12.381014 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bkpml\" (UniqueName: \"kubernetes.io/projected/12d05504-fe2a-4fc5-a9c9-33bc47aca64f-kube-api-access-bkpml\") pod \"placement-db-create-z24h9\" (UID: \"12d05504-fe2a-4fc5-a9c9-33bc47aca64f\") " pod="openstack/placement-db-create-z24h9"
Dec 05 11:28:12 crc kubenswrapper[4728]: I1205 11:28:12.381105 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2tksr\" (UniqueName: \"kubernetes.io/projected/dec7fcaf-6110-4102-8c09-3d79fe7763b4-kube-api-access-2tksr\") pod \"placement-1b8d-account-create-update-fsdrh\" (UID: \"dec7fcaf-6110-4102-8c09-3d79fe7763b4\") " pod="openstack/placement-1b8d-account-create-update-fsdrh"
Dec 05 11:28:12 crc kubenswrapper[4728]: I1205 11:28:12.381133 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/12d05504-fe2a-4fc5-a9c9-33bc47aca64f-operator-scripts\") pod \"placement-db-create-z24h9\" (UID: \"12d05504-fe2a-4fc5-a9c9-33bc47aca64f\") " pod="openstack/placement-db-create-z24h9"
Dec 05 11:28:12 crc kubenswrapper[4728]: I1205 11:28:12.381167 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dec7fcaf-6110-4102-8c09-3d79fe7763b4-operator-scripts\") pod \"placement-1b8d-account-create-update-fsdrh\" (UID: \"dec7fcaf-6110-4102-8c09-3d79fe7763b4\") " pod="openstack/placement-1b8d-account-create-update-fsdrh"
Dec 05 11:28:12 crc kubenswrapper[4728]: I1205 11:28:12.381974 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/12d05504-fe2a-4fc5-a9c9-33bc47aca64f-operator-scripts\") pod \"placement-db-create-z24h9\" (UID: \"12d05504-fe2a-4fc5-a9c9-33bc47aca64f\") " pod="openstack/placement-db-create-z24h9"
Dec 05 11:28:12 crc kubenswrapper[4728]: I1205 11:28:12.383545 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dec7fcaf-6110-4102-8c09-3d79fe7763b4-operator-scripts\") pod \"placement-1b8d-account-create-update-fsdrh\" (UID: \"dec7fcaf-6110-4102-8c09-3d79fe7763b4\") " pod="openstack/placement-1b8d-account-create-update-fsdrh"
Dec 05 11:28:12 crc kubenswrapper[4728]: I1205 11:28:12.404926 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bkpml\" (UniqueName: \"kubernetes.io/projected/12d05504-fe2a-4fc5-a9c9-33bc47aca64f-kube-api-access-bkpml\") pod \"placement-db-create-z24h9\" (UID: \"12d05504-fe2a-4fc5-a9c9-33bc47aca64f\") " pod="openstack/placement-db-create-z24h9"
Dec 05 11:28:12 crc kubenswrapper[4728]: I1205 11:28:12.405318 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2tksr\" (UniqueName: \"kubernetes.io/projected/dec7fcaf-6110-4102-8c09-3d79fe7763b4-kube-api-access-2tksr\") pod \"placement-1b8d-account-create-update-fsdrh\" (UID: \"dec7fcaf-6110-4102-8c09-3d79fe7763b4\") " pod="openstack/placement-1b8d-account-create-update-fsdrh"
Dec 05 11:28:12 crc kubenswrapper[4728]: I1205 11:28:12.604817 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-z24h9"
Dec 05 11:28:12 crc kubenswrapper[4728]: I1205 11:28:12.609397 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-1b8d-account-create-update-fsdrh"
Dec 05 11:28:12 crc kubenswrapper[4728]: I1205 11:28:12.972555 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-fqksc"
Dec 05 11:28:13 crc kubenswrapper[4728]: I1205 11:28:13.027613 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-fqksc"]
Dec 05 11:28:13 crc kubenswrapper[4728]: I1205 11:28:13.036935 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-fqksc"]
Dec 05 11:28:13 crc kubenswrapper[4728]: I1205 11:28:13.068432 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0"
Dec 05 11:28:13 crc kubenswrapper[4728]: I1205 11:28:13.597244 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-z24h9"]
Dec 05 11:28:13 crc kubenswrapper[4728]: I1205 11:28:13.676703 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-zb2z4"]
Dec 05 11:28:13 crc kubenswrapper[4728]: I1205 11:28:13.734870 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-a7df-account-create-update-jvkxk"]
Dec 05 11:28:14 crc kubenswrapper[4728]: I1205 11:28:14.003022 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-zb2z4" event={"ID":"9f832fc9-dbbb-4512-9ae8-431e113ce808","Type":"ContainerStarted","Data":"9c863d1e7defaa413ca8b1e48e719908bd7d69a9abb15a506a19495ddb468f7c"}
Dec 05 11:28:14 crc kubenswrapper[4728]: I1205 11:28:14.008710 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-z24h9" event={"ID":"12d05504-fe2a-4fc5-a9c9-33bc47aca64f","Type":"ContainerStarted","Data":"b2c151a6d401ee518362b010da577ab9ad54e51202ee6de7dbb7ea101bb2efef"}
Dec 05 11:28:14 crc kubenswrapper[4728]: I1205 11:28:14.010185 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-a7df-account-create-update-jvkxk" event={"ID":"816b032e-4ef3-46dc-a6bb-17610cf07073","Type":"ContainerStarted","Data":"6094813d05c989f6b73d89a07db1bedd269070095d8d94fef18c3769cae9ad99"}
Dec 05 11:28:14 crc kubenswrapper[4728]: I1205 11:28:14.367652 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c3204df5-4f1c-4761-a5d6-38b09d27821b" path="/var/lib/kubelet/pods/c3204df5-4f1c-4761-a5d6-38b09d27821b/volumes"
Dec 05 11:28:14 crc kubenswrapper[4728]: I1205 11:28:14.381703 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-1b8d-account-create-update-fsdrh"]
Dec 05 11:28:14 crc kubenswrapper[4728]: W1205 11:28:14.797349 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddec7fcaf_6110_4102_8c09_3d79fe7763b4.slice/crio-8594ff169c270566f542b9934bf4d9fb8f9e0049f195ed4e30e1921070f2bf09 WatchSource:0}: Error finding container 8594ff169c270566f542b9934bf4d9fb8f9e0049f195ed4e30e1921070f2bf09: Status 404 returned error can't find the container with id 8594ff169c270566f542b9934bf4d9fb8f9e0049f195ed4e30e1921070f2bf09
Dec 05 11:28:15 crc kubenswrapper[4728]: I1205 11:28:15.020155 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-1b8d-account-create-update-fsdrh" event={"ID":"dec7fcaf-6110-4102-8c09-3d79fe7763b4","Type":"ContainerStarted","Data":"8594ff169c270566f542b9934bf4d9fb8f9e0049f195ed4e30e1921070f2bf09"}
pod="openstack/placement-1b8d-account-create-update-fsdrh" event={"ID":"dec7fcaf-6110-4102-8c09-3d79fe7763b4","Type":"ContainerStarted","Data":"8594ff169c270566f542b9934bf4d9fb8f9e0049f195ed4e30e1921070f2bf09"} Dec 05 11:28:16 crc kubenswrapper[4728]: I1205 11:28:16.030866 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"d32b489f-b040-4f20-badc-ef587eeb0960","Type":"ContainerStarted","Data":"387774b1662c767fedb047814be1a290942b34641a3dc916766205c410ad3532"} Dec 05 11:28:16 crc kubenswrapper[4728]: I1205 11:28:16.033439 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 05 11:28:16 crc kubenswrapper[4728]: I1205 11:28:16.033587 4728 generic.go:334] "Generic (PLEG): container finished" podID="dec7fcaf-6110-4102-8c09-3d79fe7763b4" containerID="4f840465a966123a0aea5f3710dbb71cf794b11a30d9032b546ae8d19426040c" exitCode=0 Dec 05 11:28:16 crc kubenswrapper[4728]: I1205 11:28:16.033659 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-1b8d-account-create-update-fsdrh" event={"ID":"dec7fcaf-6110-4102-8c09-3d79fe7763b4","Type":"ContainerDied","Data":"4f840465a966123a0aea5f3710dbb71cf794b11a30d9032b546ae8d19426040c"} Dec 05 11:28:16 crc kubenswrapper[4728]: I1205 11:28:16.035503 4728 generic.go:334] "Generic (PLEG): container finished" podID="816b032e-4ef3-46dc-a6bb-17610cf07073" containerID="f7b59debd9d7c29556742f504dcece99cda7b776a7adae156794d6e5184ff9f1" exitCode=0 Dec 05 11:28:16 crc kubenswrapper[4728]: I1205 11:28:16.035584 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-a7df-account-create-update-jvkxk" event={"ID":"816b032e-4ef3-46dc-a6bb-17610cf07073","Type":"ContainerDied","Data":"f7b59debd9d7c29556742f504dcece99cda7b776a7adae156794d6e5184ff9f1"} Dec 05 11:28:16 crc kubenswrapper[4728]: I1205 11:28:16.037442 4728 generic.go:334] "Generic (PLEG): container finished" podID="9f832fc9-dbbb-4512-9ae8-431e113ce808" containerID="0d67782f0246919a000e3f2e6a2567bef7a0632e21bf5e21dd1530eff47a7209" exitCode=0 Dec 05 11:28:16 crc kubenswrapper[4728]: I1205 11:28:16.037506 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-zb2z4" event={"ID":"9f832fc9-dbbb-4512-9ae8-431e113ce808","Type":"ContainerDied","Data":"0d67782f0246919a000e3f2e6a2567bef7a0632e21bf5e21dd1530eff47a7209"} Dec 05 11:28:16 crc kubenswrapper[4728]: I1205 11:28:16.039235 4728 generic.go:334] "Generic (PLEG): container finished" podID="12d05504-fe2a-4fc5-a9c9-33bc47aca64f" containerID="f5058486fa0b5940a687e46a12e68e178b6fd9bb8678782163812d56a1709489" exitCode=0 Dec 05 11:28:16 crc kubenswrapper[4728]: I1205 11:28:16.039297 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-z24h9" event={"ID":"12d05504-fe2a-4fc5-a9c9-33bc47aca64f","Type":"ContainerDied","Data":"f5058486fa0b5940a687e46a12e68e178b6fd9bb8678782163812d56a1709489"} Dec 05 11:28:16 crc kubenswrapper[4728]: I1205 11:28:16.068648 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=6.602521897 podStartE2EDuration="43.068626762s" podCreationTimestamp="2025-12-05 11:27:33 +0000 UTC" firstStartedPulling="2025-12-05 11:27:38.43498545 +0000 UTC m=+1192.577108143" lastFinishedPulling="2025-12-05 11:28:14.901090315 +0000 UTC m=+1229.043213008" observedRunningTime="2025-12-05 11:28:16.05113229 +0000 UTC m=+1230.193254983" watchObservedRunningTime="2025-12-05 
11:28:16.068626762 +0000 UTC m=+1230.210749465" Dec 05 11:28:17 crc kubenswrapper[4728]: I1205 11:28:17.256358 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Dec 05 11:28:17 crc kubenswrapper[4728]: I1205 11:28:17.499935 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-pcvqf"] Dec 05 11:28:17 crc kubenswrapper[4728]: I1205 11:28:17.501310 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-pcvqf" Dec 05 11:28:17 crc kubenswrapper[4728]: I1205 11:28:17.505605 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-pcvqf"] Dec 05 11:28:17 crc kubenswrapper[4728]: I1205 11:28:17.567359 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-1b8d-account-create-update-fsdrh" Dec 05 11:28:17 crc kubenswrapper[4728]: I1205 11:28:17.576915 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-784a-account-create-update-c7xqd"] Dec 05 11:28:17 crc kubenswrapper[4728]: E1205 11:28:17.577620 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dec7fcaf-6110-4102-8c09-3d79fe7763b4" containerName="mariadb-account-create-update" Dec 05 11:28:17 crc kubenswrapper[4728]: I1205 11:28:17.577702 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="dec7fcaf-6110-4102-8c09-3d79fe7763b4" containerName="mariadb-account-create-update" Dec 05 11:28:17 crc kubenswrapper[4728]: I1205 11:28:17.577982 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="dec7fcaf-6110-4102-8c09-3d79fe7763b4" containerName="mariadb-account-create-update" Dec 05 11:28:17 crc kubenswrapper[4728]: I1205 11:28:17.578668 4728 util.go:30] "No sandbox for pod can be found. 
Dec 05 11:28:17 crc kubenswrapper[4728]: I1205 11:28:17.586122 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret"
Dec 05 11:28:17 crc kubenswrapper[4728]: I1205 11:28:17.592284 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-784a-account-create-update-c7xqd"]
Dec 05 11:28:17 crc kubenswrapper[4728]: I1205 11:28:17.695459 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dec7fcaf-6110-4102-8c09-3d79fe7763b4-operator-scripts\") pod \"dec7fcaf-6110-4102-8c09-3d79fe7763b4\" (UID: \"dec7fcaf-6110-4102-8c09-3d79fe7763b4\") "
Dec 05 11:28:17 crc kubenswrapper[4728]: I1205 11:28:17.695693 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2tksr\" (UniqueName: \"kubernetes.io/projected/dec7fcaf-6110-4102-8c09-3d79fe7763b4-kube-api-access-2tksr\") pod \"dec7fcaf-6110-4102-8c09-3d79fe7763b4\" (UID: \"dec7fcaf-6110-4102-8c09-3d79fe7763b4\") "
Dec 05 11:28:17 crc kubenswrapper[4728]: I1205 11:28:17.695923 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pwgz4\" (UniqueName: \"kubernetes.io/projected/d36a8b9f-7bab-4255-907f-4540e7a6b4ec-kube-api-access-pwgz4\") pod \"glance-784a-account-create-update-c7xqd\" (UID: \"d36a8b9f-7bab-4255-907f-4540e7a6b4ec\") " pod="openstack/glance-784a-account-create-update-c7xqd"
Dec 05 11:28:17 crc kubenswrapper[4728]: I1205 11:28:17.696003 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d36a8b9f-7bab-4255-907f-4540e7a6b4ec-operator-scripts\") pod \"glance-784a-account-create-update-c7xqd\" (UID: \"d36a8b9f-7bab-4255-907f-4540e7a6b4ec\") " pod="openstack/glance-784a-account-create-update-c7xqd"
Dec 05 11:28:17 crc kubenswrapper[4728]: I1205 11:28:17.696024 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/66929ba8-a892-4070-8c21-f47af196251a-operator-scripts\") pod \"glance-db-create-pcvqf\" (UID: \"66929ba8-a892-4070-8c21-f47af196251a\") " pod="openstack/glance-db-create-pcvqf"
Dec 05 11:28:17 crc kubenswrapper[4728]: I1205 11:28:17.696080 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qdqfj\" (UniqueName: \"kubernetes.io/projected/66929ba8-a892-4070-8c21-f47af196251a-kube-api-access-qdqfj\") pod \"glance-db-create-pcvqf\" (UID: \"66929ba8-a892-4070-8c21-f47af196251a\") " pod="openstack/glance-db-create-pcvqf"
Dec 05 11:28:17 crc kubenswrapper[4728]: I1205 11:28:17.696264 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dec7fcaf-6110-4102-8c09-3d79fe7763b4-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "dec7fcaf-6110-4102-8c09-3d79fe7763b4" (UID: "dec7fcaf-6110-4102-8c09-3d79fe7763b4"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:28:17 crc kubenswrapper[4728]: I1205 11:28:17.746995 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dec7fcaf-6110-4102-8c09-3d79fe7763b4-kube-api-access-2tksr" (OuterVolumeSpecName: "kube-api-access-2tksr") pod "dec7fcaf-6110-4102-8c09-3d79fe7763b4" (UID: "dec7fcaf-6110-4102-8c09-3d79fe7763b4"). InnerVolumeSpecName "kube-api-access-2tksr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:28:17 crc kubenswrapper[4728]: I1205 11:28:17.801724 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d36a8b9f-7bab-4255-907f-4540e7a6b4ec-operator-scripts\") pod \"glance-784a-account-create-update-c7xqd\" (UID: \"d36a8b9f-7bab-4255-907f-4540e7a6b4ec\") " pod="openstack/glance-784a-account-create-update-c7xqd"
Dec 05 11:28:17 crc kubenswrapper[4728]: I1205 11:28:17.801768 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/66929ba8-a892-4070-8c21-f47af196251a-operator-scripts\") pod \"glance-db-create-pcvqf\" (UID: \"66929ba8-a892-4070-8c21-f47af196251a\") " pod="openstack/glance-db-create-pcvqf"
Dec 05 11:28:17 crc kubenswrapper[4728]: I1205 11:28:17.801819 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qdqfj\" (UniqueName: \"kubernetes.io/projected/66929ba8-a892-4070-8c21-f47af196251a-kube-api-access-qdqfj\") pod \"glance-db-create-pcvqf\" (UID: \"66929ba8-a892-4070-8c21-f47af196251a\") " pod="openstack/glance-db-create-pcvqf"
Dec 05 11:28:17 crc kubenswrapper[4728]: I1205 11:28:17.801901 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pwgz4\" (UniqueName: \"kubernetes.io/projected/d36a8b9f-7bab-4255-907f-4540e7a6b4ec-kube-api-access-pwgz4\") pod \"glance-784a-account-create-update-c7xqd\" (UID: \"d36a8b9f-7bab-4255-907f-4540e7a6b4ec\") " pod="openstack/glance-784a-account-create-update-c7xqd"
Dec 05 11:28:17 crc kubenswrapper[4728]: I1205 11:28:17.803355 4728 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dec7fcaf-6110-4102-8c09-3d79fe7763b4-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 11:28:17 crc kubenswrapper[4728]: I1205 11:28:17.804717 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d36a8b9f-7bab-4255-907f-4540e7a6b4ec-operator-scripts\") pod \"glance-784a-account-create-update-c7xqd\" (UID: \"d36a8b9f-7bab-4255-907f-4540e7a6b4ec\") " pod="openstack/glance-784a-account-create-update-c7xqd"
Dec 05 11:28:17 crc kubenswrapper[4728]: I1205 11:28:17.804992 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/66929ba8-a892-4070-8c21-f47af196251a-operator-scripts\") pod \"glance-db-create-pcvqf\" (UID: \"66929ba8-a892-4070-8c21-f47af196251a\") " pod="openstack/glance-db-create-pcvqf"
Dec 05 11:28:17 crc kubenswrapper[4728]: I1205 11:28:17.804996 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2tksr\" (UniqueName: \"kubernetes.io/projected/dec7fcaf-6110-4102-8c09-3d79fe7763b4-kube-api-access-2tksr\") on node \"crc\" DevicePath \"\""
Dec 05 11:28:17 crc kubenswrapper[4728]: I1205 11:28:17.830502 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pwgz4\" (UniqueName: \"kubernetes.io/projected/d36a8b9f-7bab-4255-907f-4540e7a6b4ec-kube-api-access-pwgz4\") pod \"glance-784a-account-create-update-c7xqd\" (UID: \"d36a8b9f-7bab-4255-907f-4540e7a6b4ec\") " pod="openstack/glance-784a-account-create-update-c7xqd"
Dec 05 11:28:17 crc kubenswrapper[4728]: I1205 11:28:17.834725 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qdqfj\" (UniqueName: \"kubernetes.io/projected/66929ba8-a892-4070-8c21-f47af196251a-kube-api-access-qdqfj\") pod \"glance-db-create-pcvqf\" (UID: \"66929ba8-a892-4070-8c21-f47af196251a\") " pod="openstack/glance-db-create-pcvqf"
Dec 05 11:28:17 crc kubenswrapper[4728]: I1205 11:28:17.870342 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-z24h9"
Dec 05 11:28:17 crc kubenswrapper[4728]: I1205 11:28:17.878980 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-a7df-account-create-update-jvkxk"
Dec 05 11:28:17 crc kubenswrapper[4728]: I1205 11:28:17.885490 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-zb2z4"
Dec 05 11:28:17 crc kubenswrapper[4728]: I1205 11:28:17.907586 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/12d05504-fe2a-4fc5-a9c9-33bc47aca64f-operator-scripts\") pod \"12d05504-fe2a-4fc5-a9c9-33bc47aca64f\" (UID: \"12d05504-fe2a-4fc5-a9c9-33bc47aca64f\") "
Dec 05 11:28:17 crc kubenswrapper[4728]: I1205 11:28:17.907660 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bkpml\" (UniqueName: \"kubernetes.io/projected/12d05504-fe2a-4fc5-a9c9-33bc47aca64f-kube-api-access-bkpml\") pod \"12d05504-fe2a-4fc5-a9c9-33bc47aca64f\" (UID: \"12d05504-fe2a-4fc5-a9c9-33bc47aca64f\") "
Dec 05 11:28:17 crc kubenswrapper[4728]: I1205 11:28:17.907691 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/816b032e-4ef3-46dc-a6bb-17610cf07073-operator-scripts\") pod \"816b032e-4ef3-46dc-a6bb-17610cf07073\" (UID: \"816b032e-4ef3-46dc-a6bb-17610cf07073\") "
Dec 05 11:28:17 crc kubenswrapper[4728]: I1205 11:28:17.907738 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bgj6r\" (UniqueName: \"kubernetes.io/projected/816b032e-4ef3-46dc-a6bb-17610cf07073-kube-api-access-bgj6r\") pod \"816b032e-4ef3-46dc-a6bb-17610cf07073\" (UID: \"816b032e-4ef3-46dc-a6bb-17610cf07073\") "
Dec 05 11:28:17 crc kubenswrapper[4728]: I1205 11:28:17.907875 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9f832fc9-dbbb-4512-9ae8-431e113ce808-operator-scripts\") pod \"9f832fc9-dbbb-4512-9ae8-431e113ce808\" (UID: \"9f832fc9-dbbb-4512-9ae8-431e113ce808\") "
Dec 05 11:28:17 crc kubenswrapper[4728]: I1205 11:28:17.907914 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6fkn8\" (UniqueName: \"kubernetes.io/projected/9f832fc9-dbbb-4512-9ae8-431e113ce808-kube-api-access-6fkn8\") pod \"9f832fc9-dbbb-4512-9ae8-431e113ce808\" (UID: \"9f832fc9-dbbb-4512-9ae8-431e113ce808\") "
Dec 05 11:28:17 crc kubenswrapper[4728]: I1205 11:28:17.908828 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/816b032e-4ef3-46dc-a6bb-17610cf07073-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "816b032e-4ef3-46dc-a6bb-17610cf07073" (UID: "816b032e-4ef3-46dc-a6bb-17610cf07073"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:28:17 crc kubenswrapper[4728]: I1205 11:28:17.909260 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/12d05504-fe2a-4fc5-a9c9-33bc47aca64f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "12d05504-fe2a-4fc5-a9c9-33bc47aca64f" (UID: "12d05504-fe2a-4fc5-a9c9-33bc47aca64f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:28:17 crc kubenswrapper[4728]: I1205 11:28:17.911313 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f832fc9-dbbb-4512-9ae8-431e113ce808-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9f832fc9-dbbb-4512-9ae8-431e113ce808" (UID: "9f832fc9-dbbb-4512-9ae8-431e113ce808"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:28:17 crc kubenswrapper[4728]: I1205 11:28:17.912880 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f832fc9-dbbb-4512-9ae8-431e113ce808-kube-api-access-6fkn8" (OuterVolumeSpecName: "kube-api-access-6fkn8") pod "9f832fc9-dbbb-4512-9ae8-431e113ce808" (UID: "9f832fc9-dbbb-4512-9ae8-431e113ce808"). InnerVolumeSpecName "kube-api-access-6fkn8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:28:17 crc kubenswrapper[4728]: I1205 11:28:17.912939 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/12d05504-fe2a-4fc5-a9c9-33bc47aca64f-kube-api-access-bkpml" (OuterVolumeSpecName: "kube-api-access-bkpml") pod "12d05504-fe2a-4fc5-a9c9-33bc47aca64f" (UID: "12d05504-fe2a-4fc5-a9c9-33bc47aca64f"). InnerVolumeSpecName "kube-api-access-bkpml". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:28:17 crc kubenswrapper[4728]: I1205 11:28:17.912977 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/816b032e-4ef3-46dc-a6bb-17610cf07073-kube-api-access-bgj6r" (OuterVolumeSpecName: "kube-api-access-bgj6r") pod "816b032e-4ef3-46dc-a6bb-17610cf07073" (UID: "816b032e-4ef3-46dc-a6bb-17610cf07073"). InnerVolumeSpecName "kube-api-access-bgj6r". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:28:17 crc kubenswrapper[4728]: I1205 11:28:17.923148 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-pcvqf"
Dec 05 11:28:17 crc kubenswrapper[4728]: I1205 11:28:17.933440 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-784a-account-create-update-c7xqd"
Dec 05 11:28:18 crc kubenswrapper[4728]: I1205 11:28:18.023846 4728 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9f832fc9-dbbb-4512-9ae8-431e113ce808-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 11:28:18 crc kubenswrapper[4728]: I1205 11:28:18.023888 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6fkn8\" (UniqueName: \"kubernetes.io/projected/9f832fc9-dbbb-4512-9ae8-431e113ce808-kube-api-access-6fkn8\") on node \"crc\" DevicePath \"\""
Dec 05 11:28:18 crc kubenswrapper[4728]: I1205 11:28:18.023900 4728 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/12d05504-fe2a-4fc5-a9c9-33bc47aca64f-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 11:28:18 crc kubenswrapper[4728]: I1205 11:28:18.023911 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bkpml\" (UniqueName: \"kubernetes.io/projected/12d05504-fe2a-4fc5-a9c9-33bc47aca64f-kube-api-access-bkpml\") on node \"crc\" DevicePath \"\""
Dec 05 11:28:18 crc kubenswrapper[4728]: I1205 11:28:18.023921 4728 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/816b032e-4ef3-46dc-a6bb-17610cf07073-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 11:28:18 crc kubenswrapper[4728]: I1205 11:28:18.023932 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bgj6r\" (UniqueName: \"kubernetes.io/projected/816b032e-4ef3-46dc-a6bb-17610cf07073-kube-api-access-bgj6r\") on node \"crc\" DevicePath \"\""
Dec 05 11:28:18 crc kubenswrapper[4728]: I1205 11:28:18.060195 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-1b8d-account-create-update-fsdrh"
Dec 05 11:28:18 crc kubenswrapper[4728]: I1205 11:28:18.060196 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-1b8d-account-create-update-fsdrh" event={"ID":"dec7fcaf-6110-4102-8c09-3d79fe7763b4","Type":"ContainerDied","Data":"8594ff169c270566f542b9934bf4d9fb8f9e0049f195ed4e30e1921070f2bf09"}
Dec 05 11:28:18 crc kubenswrapper[4728]: I1205 11:28:18.060654 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8594ff169c270566f542b9934bf4d9fb8f9e0049f195ed4e30e1921070f2bf09"
Dec 05 11:28:18 crc kubenswrapper[4728]: I1205 11:28:18.061823 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-a7df-account-create-update-jvkxk" event={"ID":"816b032e-4ef3-46dc-a6bb-17610cf07073","Type":"ContainerDied","Data":"6094813d05c989f6b73d89a07db1bedd269070095d8d94fef18c3769cae9ad99"}
Dec 05 11:28:18 crc kubenswrapper[4728]: I1205 11:28:18.061845 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6094813d05c989f6b73d89a07db1bedd269070095d8d94fef18c3769cae9ad99"
Dec 05 11:28:18 crc kubenswrapper[4728]: I1205 11:28:18.061908 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-a7df-account-create-update-jvkxk"
Dec 05 11:28:18 crc kubenswrapper[4728]: I1205 11:28:18.064929 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-zb2z4" event={"ID":"9f832fc9-dbbb-4512-9ae8-431e113ce808","Type":"ContainerDied","Data":"9c863d1e7defaa413ca8b1e48e719908bd7d69a9abb15a506a19495ddb468f7c"}
Dec 05 11:28:18 crc kubenswrapper[4728]: I1205 11:28:18.064957 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9c863d1e7defaa413ca8b1e48e719908bd7d69a9abb15a506a19495ddb468f7c"
Dec 05 11:28:18 crc kubenswrapper[4728]: I1205 11:28:18.065019 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-zb2z4"
Dec 05 11:28:18 crc kubenswrapper[4728]: I1205 11:28:18.068526 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-z24h9" event={"ID":"12d05504-fe2a-4fc5-a9c9-33bc47aca64f","Type":"ContainerDied","Data":"b2c151a6d401ee518362b010da577ab9ad54e51202ee6de7dbb7ea101bb2efef"}
Dec 05 11:28:18 crc kubenswrapper[4728]: I1205 11:28:18.068562 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b2c151a6d401ee518362b010da577ab9ad54e51202ee6de7dbb7ea101bb2efef"
Dec 05 11:28:18 crc kubenswrapper[4728]: I1205 11:28:18.068612 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-z24h9"
Dec 05 11:28:18 crc kubenswrapper[4728]: I1205 11:28:18.326840 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-784a-account-create-update-c7xqd"]
Dec 05 11:28:18 crc kubenswrapper[4728]: W1205 11:28:18.330344 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd36a8b9f_7bab_4255_907f_4540e7a6b4ec.slice/crio-faa8db0aa9295fd625388a3c75f57937f3813906a8c60a6f66b91ef86698c063 WatchSource:0}: Error finding container faa8db0aa9295fd625388a3c75f57937f3813906a8c60a6f66b91ef86698c063: Status 404 returned error can't find the container with id faa8db0aa9295fd625388a3c75f57937f3813906a8c60a6f66b91ef86698c063
Dec 05 11:28:18 crc kubenswrapper[4728]: I1205 11:28:18.393196 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-pcvqf"]
Dec 05 11:28:18 crc kubenswrapper[4728]: W1205 11:28:18.400953 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod66929ba8_a892_4070_8c21_f47af196251a.slice/crio-366e6192b1e2b868523e47732fe575a105434a5b01bea7e96f4ad76044d4a247 WatchSource:0}: Error finding container 366e6192b1e2b868523e47732fe575a105434a5b01bea7e96f4ad76044d4a247: Status 404 returned error can't find the container with id 366e6192b1e2b868523e47732fe575a105434a5b01bea7e96f4ad76044d4a247
Dec 05 11:28:19 crc kubenswrapper[4728]: I1205 11:28:19.080684 4728 generic.go:334] "Generic (PLEG): container finished" podID="66929ba8-a892-4070-8c21-f47af196251a" containerID="8f3976536cb3b048be931eae838acdb03578e01467663f7ce8db5e795904855d" exitCode=0
Dec 05 11:28:19 crc kubenswrapper[4728]: I1205 11:28:19.080917 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-pcvqf" event={"ID":"66929ba8-a892-4070-8c21-f47af196251a","Type":"ContainerDied","Data":"8f3976536cb3b048be931eae838acdb03578e01467663f7ce8db5e795904855d"}
Dec 05 11:28:19 crc kubenswrapper[4728]: I1205 11:28:19.081280 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-pcvqf" event={"ID":"66929ba8-a892-4070-8c21-f47af196251a","Type":"ContainerStarted","Data":"366e6192b1e2b868523e47732fe575a105434a5b01bea7e96f4ad76044d4a247"}
Dec 05 11:28:19 crc kubenswrapper[4728]: I1205 11:28:19.083918 4728 generic.go:334] "Generic (PLEG): container finished" podID="d36a8b9f-7bab-4255-907f-4540e7a6b4ec" containerID="61f95e19797206de67bcb54946603504d30662b6db348dd7606a2d6bf47173e8" exitCode=0
Dec 05 11:28:19 crc kubenswrapper[4728]: I1205 11:28:19.083964 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-784a-account-create-update-c7xqd" event={"ID":"d36a8b9f-7bab-4255-907f-4540e7a6b4ec","Type":"ContainerDied","Data":"61f95e19797206de67bcb54946603504d30662b6db348dd7606a2d6bf47173e8"}
Dec 05 11:28:19 crc kubenswrapper[4728]: I1205 11:28:19.083992 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-784a-account-create-update-c7xqd" event={"ID":"d36a8b9f-7bab-4255-907f-4540e7a6b4ec","Type":"ContainerStarted","Data":"faa8db0aa9295fd625388a3c75f57937f3813906a8c60a6f66b91ef86698c063"}
Dec 05 11:28:20 crc kubenswrapper[4728]: I1205 11:28:20.496391 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-784a-account-create-update-c7xqd"
Dec 05 11:28:20 crc kubenswrapper[4728]: I1205 11:28:20.606394 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-pcvqf"
Dec 05 11:28:20 crc kubenswrapper[4728]: I1205 11:28:20.672918 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d36a8b9f-7bab-4255-907f-4540e7a6b4ec-operator-scripts\") pod \"d36a8b9f-7bab-4255-907f-4540e7a6b4ec\" (UID: \"d36a8b9f-7bab-4255-907f-4540e7a6b4ec\") "
Dec 05 11:28:20 crc kubenswrapper[4728]: I1205 11:28:20.673138 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pwgz4\" (UniqueName: \"kubernetes.io/projected/d36a8b9f-7bab-4255-907f-4540e7a6b4ec-kube-api-access-pwgz4\") pod \"d36a8b9f-7bab-4255-907f-4540e7a6b4ec\" (UID: \"d36a8b9f-7bab-4255-907f-4540e7a6b4ec\") "
Dec 05 11:28:20 crc kubenswrapper[4728]: I1205 11:28:20.675292 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d36a8b9f-7bab-4255-907f-4540e7a6b4ec-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d36a8b9f-7bab-4255-907f-4540e7a6b4ec" (UID: "d36a8b9f-7bab-4255-907f-4540e7a6b4ec"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:28:20 crc kubenswrapper[4728]: I1205 11:28:20.682719 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d36a8b9f-7bab-4255-907f-4540e7a6b4ec-kube-api-access-pwgz4" (OuterVolumeSpecName: "kube-api-access-pwgz4") pod "d36a8b9f-7bab-4255-907f-4540e7a6b4ec" (UID: "d36a8b9f-7bab-4255-907f-4540e7a6b4ec"). InnerVolumeSpecName "kube-api-access-pwgz4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:28:20 crc kubenswrapper[4728]: I1205 11:28:20.774771 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/66929ba8-a892-4070-8c21-f47af196251a-operator-scripts\") pod \"66929ba8-a892-4070-8c21-f47af196251a\" (UID: \"66929ba8-a892-4070-8c21-f47af196251a\") "
Dec 05 11:28:20 crc kubenswrapper[4728]: I1205 11:28:20.774851 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qdqfj\" (UniqueName: \"kubernetes.io/projected/66929ba8-a892-4070-8c21-f47af196251a-kube-api-access-qdqfj\") pod \"66929ba8-a892-4070-8c21-f47af196251a\" (UID: \"66929ba8-a892-4070-8c21-f47af196251a\") "
Dec 05 11:28:20 crc kubenswrapper[4728]: I1205 11:28:20.775313 4728 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d36a8b9f-7bab-4255-907f-4540e7a6b4ec-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 11:28:20 crc kubenswrapper[4728]: I1205 11:28:20.775343 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pwgz4\" (UniqueName: \"kubernetes.io/projected/d36a8b9f-7bab-4255-907f-4540e7a6b4ec-kube-api-access-pwgz4\") on node \"crc\" DevicePath \"\""
Dec 05 11:28:20 crc kubenswrapper[4728]: I1205 11:28:20.775450 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/66929ba8-a892-4070-8c21-f47af196251a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "66929ba8-a892-4070-8c21-f47af196251a" (UID: "66929ba8-a892-4070-8c21-f47af196251a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:28:20 crc kubenswrapper[4728]: I1205 11:28:20.779262 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66929ba8-a892-4070-8c21-f47af196251a-kube-api-access-qdqfj" (OuterVolumeSpecName: "kube-api-access-qdqfj") pod "66929ba8-a892-4070-8c21-f47af196251a" (UID: "66929ba8-a892-4070-8c21-f47af196251a"). InnerVolumeSpecName "kube-api-access-qdqfj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:28:20 crc kubenswrapper[4728]: I1205 11:28:20.876903 4728 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/66929ba8-a892-4070-8c21-f47af196251a-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 11:28:20 crc kubenswrapper[4728]: I1205 11:28:20.876948 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qdqfj\" (UniqueName: \"kubernetes.io/projected/66929ba8-a892-4070-8c21-f47af196251a-kube-api-access-qdqfj\") on node \"crc\" DevicePath \"\""
Dec 05 11:28:21 crc kubenswrapper[4728]: I1205 11:28:21.105325 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-pcvqf"
Dec 05 11:28:21 crc kubenswrapper[4728]: I1205 11:28:21.105945 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-pcvqf" event={"ID":"66929ba8-a892-4070-8c21-f47af196251a","Type":"ContainerDied","Data":"366e6192b1e2b868523e47732fe575a105434a5b01bea7e96f4ad76044d4a247"}
Dec 05 11:28:21 crc kubenswrapper[4728]: I1205 11:28:21.106083 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="366e6192b1e2b868523e47732fe575a105434a5b01bea7e96f4ad76044d4a247"
Dec 05 11:28:21 crc kubenswrapper[4728]: I1205 11:28:21.108378 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-784a-account-create-update-c7xqd" event={"ID":"d36a8b9f-7bab-4255-907f-4540e7a6b4ec","Type":"ContainerDied","Data":"faa8db0aa9295fd625388a3c75f57937f3813906a8c60a6f66b91ef86698c063"}
Dec 05 11:28:21 crc kubenswrapper[4728]: I1205 11:28:21.108427 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="faa8db0aa9295fd625388a3c75f57937f3813906a8c60a6f66b91ef86698c063"
Dec 05 11:28:21 crc kubenswrapper[4728]: I1205 11:28:21.108447 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-784a-account-create-update-c7xqd"
Dec 05 11:28:22 crc kubenswrapper[4728]: I1205 11:28:22.760198 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-8fpw9"]
Dec 05 11:28:22 crc kubenswrapper[4728]: E1205 11:28:22.760810 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f832fc9-dbbb-4512-9ae8-431e113ce808" containerName="mariadb-database-create"
Dec 05 11:28:22 crc kubenswrapper[4728]: I1205 11:28:22.760822 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f832fc9-dbbb-4512-9ae8-431e113ce808" containerName="mariadb-database-create"
Dec 05 11:28:22 crc kubenswrapper[4728]: E1205 11:28:22.760841 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d36a8b9f-7bab-4255-907f-4540e7a6b4ec" containerName="mariadb-account-create-update"
Dec 05 11:28:22 crc kubenswrapper[4728]: I1205 11:28:22.760848 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="d36a8b9f-7bab-4255-907f-4540e7a6b4ec" containerName="mariadb-account-create-update"
Dec 05 11:28:22 crc kubenswrapper[4728]: E1205 11:28:22.760861 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66929ba8-a892-4070-8c21-f47af196251a" containerName="mariadb-database-create"
Dec 05 11:28:22 crc kubenswrapper[4728]: I1205 11:28:22.760868 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="66929ba8-a892-4070-8c21-f47af196251a" containerName="mariadb-database-create"
Dec 05 11:28:22 crc kubenswrapper[4728]: E1205 11:28:22.760879 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="816b032e-4ef3-46dc-a6bb-17610cf07073" containerName="mariadb-account-create-update"
Dec 05 11:28:22 crc kubenswrapper[4728]: I1205 11:28:22.760885 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="816b032e-4ef3-46dc-a6bb-17610cf07073" containerName="mariadb-account-create-update"
Dec 05 11:28:22 crc kubenswrapper[4728]: E1205 11:28:22.760896 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12d05504-fe2a-4fc5-a9c9-33bc47aca64f" containerName="mariadb-database-create"
Dec 05 11:28:22 crc kubenswrapper[4728]: I1205 11:28:22.760903 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="12d05504-fe2a-4fc5-a9c9-33bc47aca64f" containerName="mariadb-database-create"
Dec 05 11:28:22 crc kubenswrapper[4728]: I1205 11:28:22.761093 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="66929ba8-a892-4070-8c21-f47af196251a" containerName="mariadb-database-create"
Dec 05 11:28:22 crc kubenswrapper[4728]: I1205 11:28:22.761108 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="12d05504-fe2a-4fc5-a9c9-33bc47aca64f" containerName="mariadb-database-create"
Dec 05 11:28:22 crc kubenswrapper[4728]: I1205 11:28:22.761121 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="d36a8b9f-7bab-4255-907f-4540e7a6b4ec" containerName="mariadb-account-create-update"
Dec 05 11:28:22 crc kubenswrapper[4728]: I1205 11:28:22.761135 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="816b032e-4ef3-46dc-a6bb-17610cf07073" containerName="mariadb-account-create-update"
Dec 05 11:28:22 crc kubenswrapper[4728]: I1205 11:28:22.761151 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f832fc9-dbbb-4512-9ae8-431e113ce808" containerName="mariadb-database-create"
Dec 05 11:28:22 crc kubenswrapper[4728]: I1205 11:28:22.761774 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-8fpw9"
Dec 05 11:28:22 crc kubenswrapper[4728]: I1205 11:28:22.766873 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data"
Dec 05 11:28:22 crc kubenswrapper[4728]: I1205 11:28:22.767885 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-4w4f7"
Dec 05 11:28:22 crc kubenswrapper[4728]: I1205 11:28:22.780367 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-8fpw9"]
Dec 05 11:28:22 crc kubenswrapper[4728]: I1205 11:28:22.916954 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/30ec6fee-e0fe-471a-a673-7856319a8dd8-db-sync-config-data\") pod \"glance-db-sync-8fpw9\" (UID: \"30ec6fee-e0fe-471a-a673-7856319a8dd8\") " pod="openstack/glance-db-sync-8fpw9"
Dec 05 11:28:22 crc kubenswrapper[4728]: I1205 11:28:22.917008 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30ec6fee-e0fe-471a-a673-7856319a8dd8-combined-ca-bundle\") pod \"glance-db-sync-8fpw9\" (UID: \"30ec6fee-e0fe-471a-a673-7856319a8dd8\") " pod="openstack/glance-db-sync-8fpw9"
Dec 05 11:28:22 crc kubenswrapper[4728]: I1205 11:28:22.917031 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30ec6fee-e0fe-471a-a673-7856319a8dd8-config-data\") pod \"glance-db-sync-8fpw9\" (UID: \"30ec6fee-e0fe-471a-a673-7856319a8dd8\") " pod="openstack/glance-db-sync-8fpw9"
Dec 05 11:28:22 crc kubenswrapper[4728]: I1205 11:28:22.917061 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fzzfz\" (UniqueName: \"kubernetes.io/projected/30ec6fee-e0fe-471a-a673-7856319a8dd8-kube-api-access-fzzfz\") pod \"glance-db-sync-8fpw9\" (UID: \"30ec6fee-e0fe-471a-a673-7856319a8dd8\") " pod="openstack/glance-db-sync-8fpw9"
Dec 05 11:28:23 crc kubenswrapper[4728]: I1205 11:28:23.019081 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/30ec6fee-e0fe-471a-a673-7856319a8dd8-db-sync-config-data\") pod \"glance-db-sync-8fpw9\" (UID: \"30ec6fee-e0fe-471a-a673-7856319a8dd8\") " pod="openstack/glance-db-sync-8fpw9"
\"kubernetes.io/secret/30ec6fee-e0fe-471a-a673-7856319a8dd8-db-sync-config-data\") pod \"glance-db-sync-8fpw9\" (UID: \"30ec6fee-e0fe-471a-a673-7856319a8dd8\") " pod="openstack/glance-db-sync-8fpw9" Dec 05 11:28:23 crc kubenswrapper[4728]: I1205 11:28:23.019150 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30ec6fee-e0fe-471a-a673-7856319a8dd8-combined-ca-bundle\") pod \"glance-db-sync-8fpw9\" (UID: \"30ec6fee-e0fe-471a-a673-7856319a8dd8\") " pod="openstack/glance-db-sync-8fpw9" Dec 05 11:28:23 crc kubenswrapper[4728]: I1205 11:28:23.019206 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30ec6fee-e0fe-471a-a673-7856319a8dd8-config-data\") pod \"glance-db-sync-8fpw9\" (UID: \"30ec6fee-e0fe-471a-a673-7856319a8dd8\") " pod="openstack/glance-db-sync-8fpw9" Dec 05 11:28:23 crc kubenswrapper[4728]: I1205 11:28:23.019247 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fzzfz\" (UniqueName: \"kubernetes.io/projected/30ec6fee-e0fe-471a-a673-7856319a8dd8-kube-api-access-fzzfz\") pod \"glance-db-sync-8fpw9\" (UID: \"30ec6fee-e0fe-471a-a673-7856319a8dd8\") " pod="openstack/glance-db-sync-8fpw9" Dec 05 11:28:23 crc kubenswrapper[4728]: I1205 11:28:23.023343 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/30ec6fee-e0fe-471a-a673-7856319a8dd8-db-sync-config-data\") pod \"glance-db-sync-8fpw9\" (UID: \"30ec6fee-e0fe-471a-a673-7856319a8dd8\") " pod="openstack/glance-db-sync-8fpw9" Dec 05 11:28:23 crc kubenswrapper[4728]: I1205 11:28:23.023394 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30ec6fee-e0fe-471a-a673-7856319a8dd8-config-data\") pod \"glance-db-sync-8fpw9\" (UID: \"30ec6fee-e0fe-471a-a673-7856319a8dd8\") " pod="openstack/glance-db-sync-8fpw9" Dec 05 11:28:23 crc kubenswrapper[4728]: I1205 11:28:23.023497 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30ec6fee-e0fe-471a-a673-7856319a8dd8-combined-ca-bundle\") pod \"glance-db-sync-8fpw9\" (UID: \"30ec6fee-e0fe-471a-a673-7856319a8dd8\") " pod="openstack/glance-db-sync-8fpw9" Dec 05 11:28:23 crc kubenswrapper[4728]: I1205 11:28:23.038541 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fzzfz\" (UniqueName: \"kubernetes.io/projected/30ec6fee-e0fe-471a-a673-7856319a8dd8-kube-api-access-fzzfz\") pod \"glance-db-sync-8fpw9\" (UID: \"30ec6fee-e0fe-471a-a673-7856319a8dd8\") " pod="openstack/glance-db-sync-8fpw9" Dec 05 11:28:23 crc kubenswrapper[4728]: I1205 11:28:23.084933 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-8fpw9" Dec 05 11:28:23 crc kubenswrapper[4728]: I1205 11:28:23.609885 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-8fpw9"] Dec 05 11:28:24 crc kubenswrapper[4728]: I1205 11:28:24.140066 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-8fpw9" event={"ID":"30ec6fee-e0fe-471a-a673-7856319a8dd8","Type":"ContainerStarted","Data":"ca7cfe6f02286d1e94c5e08b838a22fe050979f2de963deed628fc9ad1b66bbd"} Dec 05 11:28:24 crc kubenswrapper[4728]: I1205 11:28:24.200971 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 05 11:28:24 crc kubenswrapper[4728]: I1205 11:28:24.397297 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-698758b865-8qq5x"] Dec 05 11:28:24 crc kubenswrapper[4728]: I1205 11:28:24.401781 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-8qq5x" Dec 05 11:28:24 crc kubenswrapper[4728]: I1205 11:28:24.405085 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-8qq5x"] Dec 05 11:28:24 crc kubenswrapper[4728]: I1205 11:28:24.567846 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/41e42f3d-f948-4eb4-99cb-842f4e17c69c-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-8qq5x\" (UID: \"41e42f3d-f948-4eb4-99cb-842f4e17c69c\") " pod="openstack/dnsmasq-dns-698758b865-8qq5x" Dec 05 11:28:24 crc kubenswrapper[4728]: I1205 11:28:24.568206 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/41e42f3d-f948-4eb4-99cb-842f4e17c69c-config\") pod \"dnsmasq-dns-698758b865-8qq5x\" (UID: \"41e42f3d-f948-4eb4-99cb-842f4e17c69c\") " pod="openstack/dnsmasq-dns-698758b865-8qq5x" Dec 05 11:28:24 crc kubenswrapper[4728]: I1205 11:28:24.568233 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-npnkh\" (UniqueName: \"kubernetes.io/projected/41e42f3d-f948-4eb4-99cb-842f4e17c69c-kube-api-access-npnkh\") pod \"dnsmasq-dns-698758b865-8qq5x\" (UID: \"41e42f3d-f948-4eb4-99cb-842f4e17c69c\") " pod="openstack/dnsmasq-dns-698758b865-8qq5x" Dec 05 11:28:24 crc kubenswrapper[4728]: I1205 11:28:24.568268 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/41e42f3d-f948-4eb4-99cb-842f4e17c69c-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-8qq5x\" (UID: \"41e42f3d-f948-4eb4-99cb-842f4e17c69c\") " pod="openstack/dnsmasq-dns-698758b865-8qq5x" Dec 05 11:28:24 crc kubenswrapper[4728]: I1205 11:28:24.568295 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/41e42f3d-f948-4eb4-99cb-842f4e17c69c-dns-svc\") pod \"dnsmasq-dns-698758b865-8qq5x\" (UID: \"41e42f3d-f948-4eb4-99cb-842f4e17c69c\") " pod="openstack/dnsmasq-dns-698758b865-8qq5x" Dec 05 11:28:24 crc kubenswrapper[4728]: I1205 11:28:24.601740 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Dec 05 11:28:24 crc kubenswrapper[4728]: I1205 11:28:24.669713 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" 
(UniqueName: \"kubernetes.io/configmap/41e42f3d-f948-4eb4-99cb-842f4e17c69c-config\") pod \"dnsmasq-dns-698758b865-8qq5x\" (UID: \"41e42f3d-f948-4eb4-99cb-842f4e17c69c\") " pod="openstack/dnsmasq-dns-698758b865-8qq5x" Dec 05 11:28:24 crc kubenswrapper[4728]: I1205 11:28:24.669764 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-npnkh\" (UniqueName: \"kubernetes.io/projected/41e42f3d-f948-4eb4-99cb-842f4e17c69c-kube-api-access-npnkh\") pod \"dnsmasq-dns-698758b865-8qq5x\" (UID: \"41e42f3d-f948-4eb4-99cb-842f4e17c69c\") " pod="openstack/dnsmasq-dns-698758b865-8qq5x" Dec 05 11:28:24 crc kubenswrapper[4728]: I1205 11:28:24.669818 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/41e42f3d-f948-4eb4-99cb-842f4e17c69c-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-8qq5x\" (UID: \"41e42f3d-f948-4eb4-99cb-842f4e17c69c\") " pod="openstack/dnsmasq-dns-698758b865-8qq5x" Dec 05 11:28:24 crc kubenswrapper[4728]: I1205 11:28:24.669845 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/41e42f3d-f948-4eb4-99cb-842f4e17c69c-dns-svc\") pod \"dnsmasq-dns-698758b865-8qq5x\" (UID: \"41e42f3d-f948-4eb4-99cb-842f4e17c69c\") " pod="openstack/dnsmasq-dns-698758b865-8qq5x" Dec 05 11:28:24 crc kubenswrapper[4728]: I1205 11:28:24.669892 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/41e42f3d-f948-4eb4-99cb-842f4e17c69c-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-8qq5x\" (UID: \"41e42f3d-f948-4eb4-99cb-842f4e17c69c\") " pod="openstack/dnsmasq-dns-698758b865-8qq5x" Dec 05 11:28:24 crc kubenswrapper[4728]: I1205 11:28:24.671044 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/41e42f3d-f948-4eb4-99cb-842f4e17c69c-config\") pod \"dnsmasq-dns-698758b865-8qq5x\" (UID: \"41e42f3d-f948-4eb4-99cb-842f4e17c69c\") " pod="openstack/dnsmasq-dns-698758b865-8qq5x" Dec 05 11:28:24 crc kubenswrapper[4728]: I1205 11:28:24.671213 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/41e42f3d-f948-4eb4-99cb-842f4e17c69c-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-8qq5x\" (UID: \"41e42f3d-f948-4eb4-99cb-842f4e17c69c\") " pod="openstack/dnsmasq-dns-698758b865-8qq5x" Dec 05 11:28:24 crc kubenswrapper[4728]: I1205 11:28:24.671530 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/41e42f3d-f948-4eb4-99cb-842f4e17c69c-dns-svc\") pod \"dnsmasq-dns-698758b865-8qq5x\" (UID: \"41e42f3d-f948-4eb4-99cb-842f4e17c69c\") " pod="openstack/dnsmasq-dns-698758b865-8qq5x" Dec 05 11:28:24 crc kubenswrapper[4728]: I1205 11:28:24.671727 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/41e42f3d-f948-4eb4-99cb-842f4e17c69c-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-8qq5x\" (UID: \"41e42f3d-f948-4eb4-99cb-842f4e17c69c\") " pod="openstack/dnsmasq-dns-698758b865-8qq5x" Dec 05 11:28:24 crc kubenswrapper[4728]: I1205 11:28:24.690708 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-npnkh\" (UniqueName: \"kubernetes.io/projected/41e42f3d-f948-4eb4-99cb-842f4e17c69c-kube-api-access-npnkh\") pod 
\"dnsmasq-dns-698758b865-8qq5x\" (UID: \"41e42f3d-f948-4eb4-99cb-842f4e17c69c\") " pod="openstack/dnsmasq-dns-698758b865-8qq5x" Dec 05 11:28:24 crc kubenswrapper[4728]: I1205 11:28:24.741503 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-8qq5x" Dec 05 11:28:25 crc kubenswrapper[4728]: I1205 11:28:25.205643 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-8qq5x"] Dec 05 11:28:25 crc kubenswrapper[4728]: W1205 11:28:25.214635 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod41e42f3d_f948_4eb4_99cb_842f4e17c69c.slice/crio-74c1c3bb1d6fbaba5c2712c22502a0f5dc04c4161ea81d78b025b1f580a35442 WatchSource:0}: Error finding container 74c1c3bb1d6fbaba5c2712c22502a0f5dc04c4161ea81d78b025b1f580a35442: Status 404 returned error can't find the container with id 74c1c3bb1d6fbaba5c2712c22502a0f5dc04c4161ea81d78b025b1f580a35442 Dec 05 11:28:25 crc kubenswrapper[4728]: I1205 11:28:25.489497 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Dec 05 11:28:25 crc kubenswrapper[4728]: I1205 11:28:25.495168 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Dec 05 11:28:25 crc kubenswrapper[4728]: I1205 11:28:25.497982 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Dec 05 11:28:25 crc kubenswrapper[4728]: I1205 11:28:25.498371 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Dec 05 11:28:25 crc kubenswrapper[4728]: I1205 11:28:25.498526 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-v855g" Dec 05 11:28:25 crc kubenswrapper[4728]: I1205 11:28:25.502027 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Dec 05 11:28:25 crc kubenswrapper[4728]: I1205 11:28:25.516591 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 05 11:28:25 crc kubenswrapper[4728]: I1205 11:28:25.684627 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"swift-storage-0\" (UID: \"a64af6ac-e922-435f-bee9-1cc7e7a95f4a\") " pod="openstack/swift-storage-0" Dec 05 11:28:25 crc kubenswrapper[4728]: I1205 11:28:25.684688 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w4942\" (UniqueName: \"kubernetes.io/projected/a64af6ac-e922-435f-bee9-1cc7e7a95f4a-kube-api-access-w4942\") pod \"swift-storage-0\" (UID: \"a64af6ac-e922-435f-bee9-1cc7e7a95f4a\") " pod="openstack/swift-storage-0" Dec 05 11:28:25 crc kubenswrapper[4728]: I1205 11:28:25.684760 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/a64af6ac-e922-435f-bee9-1cc7e7a95f4a-lock\") pod \"swift-storage-0\" (UID: \"a64af6ac-e922-435f-bee9-1cc7e7a95f4a\") " pod="openstack/swift-storage-0" Dec 05 11:28:25 crc kubenswrapper[4728]: I1205 11:28:25.684810 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/a64af6ac-e922-435f-bee9-1cc7e7a95f4a-cache\") pod \"swift-storage-0\" (UID: 
\"a64af6ac-e922-435f-bee9-1cc7e7a95f4a\") " pod="openstack/swift-storage-0" Dec 05 11:28:25 crc kubenswrapper[4728]: I1205 11:28:25.684842 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a64af6ac-e922-435f-bee9-1cc7e7a95f4a-etc-swift\") pod \"swift-storage-0\" (UID: \"a64af6ac-e922-435f-bee9-1cc7e7a95f4a\") " pod="openstack/swift-storage-0" Dec 05 11:28:25 crc kubenswrapper[4728]: I1205 11:28:25.701754 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 11:28:25 crc kubenswrapper[4728]: I1205 11:28:25.701902 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 11:28:25 crc kubenswrapper[4728]: I1205 11:28:25.702041 4728 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" Dec 05 11:28:25 crc kubenswrapper[4728]: I1205 11:28:25.703574 4728 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d8efdde93e15953f6b7d53c9af0274517d1b20f2c87e219dbc82f0145e651995"} pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 11:28:25 crc kubenswrapper[4728]: I1205 11:28:25.703653 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" containerID="cri-o://d8efdde93e15953f6b7d53c9af0274517d1b20f2c87e219dbc82f0145e651995" gracePeriod=600 Dec 05 11:28:25 crc kubenswrapper[4728]: I1205 11:28:25.787399 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/a64af6ac-e922-435f-bee9-1cc7e7a95f4a-lock\") pod \"swift-storage-0\" (UID: \"a64af6ac-e922-435f-bee9-1cc7e7a95f4a\") " pod="openstack/swift-storage-0" Dec 05 11:28:25 crc kubenswrapper[4728]: I1205 11:28:25.787469 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a64af6ac-e922-435f-bee9-1cc7e7a95f4a-etc-swift\") pod \"swift-storage-0\" (UID: \"a64af6ac-e922-435f-bee9-1cc7e7a95f4a\") " pod="openstack/swift-storage-0" Dec 05 11:28:25 crc kubenswrapper[4728]: I1205 11:28:25.787495 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/a64af6ac-e922-435f-bee9-1cc7e7a95f4a-cache\") pod \"swift-storage-0\" (UID: \"a64af6ac-e922-435f-bee9-1cc7e7a95f4a\") " pod="openstack/swift-storage-0" Dec 05 11:28:25 crc kubenswrapper[4728]: I1205 11:28:25.787602 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"swift-storage-0\" (UID: 
\"a64af6ac-e922-435f-bee9-1cc7e7a95f4a\") " pod="openstack/swift-storage-0" Dec 05 11:28:25 crc kubenswrapper[4728]: I1205 11:28:25.787629 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w4942\" (UniqueName: \"kubernetes.io/projected/a64af6ac-e922-435f-bee9-1cc7e7a95f4a-kube-api-access-w4942\") pod \"swift-storage-0\" (UID: \"a64af6ac-e922-435f-bee9-1cc7e7a95f4a\") " pod="openstack/swift-storage-0" Dec 05 11:28:25 crc kubenswrapper[4728]: E1205 11:28:25.787723 4728 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 11:28:25 crc kubenswrapper[4728]: E1205 11:28:25.787760 4728 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 11:28:25 crc kubenswrapper[4728]: E1205 11:28:25.787867 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/a64af6ac-e922-435f-bee9-1cc7e7a95f4a-etc-swift podName:a64af6ac-e922-435f-bee9-1cc7e7a95f4a nodeName:}" failed. No retries permitted until 2025-12-05 11:28:26.287847198 +0000 UTC m=+1240.429969891 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/a64af6ac-e922-435f-bee9-1cc7e7a95f4a-etc-swift") pod "swift-storage-0" (UID: "a64af6ac-e922-435f-bee9-1cc7e7a95f4a") : configmap "swift-ring-files" not found Dec 05 11:28:25 crc kubenswrapper[4728]: I1205 11:28:25.788133 4728 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"swift-storage-0\" (UID: \"a64af6ac-e922-435f-bee9-1cc7e7a95f4a\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/swift-storage-0" Dec 05 11:28:25 crc kubenswrapper[4728]: I1205 11:28:25.788186 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/a64af6ac-e922-435f-bee9-1cc7e7a95f4a-lock\") pod \"swift-storage-0\" (UID: \"a64af6ac-e922-435f-bee9-1cc7e7a95f4a\") " pod="openstack/swift-storage-0" Dec 05 11:28:25 crc kubenswrapper[4728]: I1205 11:28:25.788727 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/a64af6ac-e922-435f-bee9-1cc7e7a95f4a-cache\") pod \"swift-storage-0\" (UID: \"a64af6ac-e922-435f-bee9-1cc7e7a95f4a\") " pod="openstack/swift-storage-0" Dec 05 11:28:25 crc kubenswrapper[4728]: I1205 11:28:25.815525 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w4942\" (UniqueName: \"kubernetes.io/projected/a64af6ac-e922-435f-bee9-1cc7e7a95f4a-kube-api-access-w4942\") pod \"swift-storage-0\" (UID: \"a64af6ac-e922-435f-bee9-1cc7e7a95f4a\") " pod="openstack/swift-storage-0" Dec 05 11:28:25 crc kubenswrapper[4728]: I1205 11:28:25.818827 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"swift-storage-0\" (UID: \"a64af6ac-e922-435f-bee9-1cc7e7a95f4a\") " pod="openstack/swift-storage-0" Dec 05 11:28:26 crc kubenswrapper[4728]: I1205 11:28:26.023686 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-mwr89"] Dec 05 11:28:26 crc kubenswrapper[4728]: I1205 11:28:26.024710 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-mwr89" Dec 05 11:28:26 crc kubenswrapper[4728]: I1205 11:28:26.027380 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 05 11:28:26 crc kubenswrapper[4728]: I1205 11:28:26.027401 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Dec 05 11:28:26 crc kubenswrapper[4728]: I1205 11:28:26.028420 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Dec 05 11:28:26 crc kubenswrapper[4728]: I1205 11:28:26.048404 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-mwr89"] Dec 05 11:28:26 crc kubenswrapper[4728]: I1205 11:28:26.162542 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-8qq5x" event={"ID":"41e42f3d-f948-4eb4-99cb-842f4e17c69c","Type":"ContainerStarted","Data":"74c1c3bb1d6fbaba5c2712c22502a0f5dc04c4161ea81d78b025b1f580a35442"} Dec 05 11:28:26 crc kubenswrapper[4728]: I1205 11:28:26.196360 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/f047ba61-512e-4899-95ec-2dd4a1862858-etc-swift\") pod \"swift-ring-rebalance-mwr89\" (UID: \"f047ba61-512e-4899-95ec-2dd4a1862858\") " pod="openstack/swift-ring-rebalance-mwr89" Dec 05 11:28:26 crc kubenswrapper[4728]: I1205 11:28:26.196519 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/f047ba61-512e-4899-95ec-2dd4a1862858-ring-data-devices\") pod \"swift-ring-rebalance-mwr89\" (UID: \"f047ba61-512e-4899-95ec-2dd4a1862858\") " pod="openstack/swift-ring-rebalance-mwr89" Dec 05 11:28:26 crc kubenswrapper[4728]: I1205 11:28:26.196579 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x2sd6\" (UniqueName: \"kubernetes.io/projected/f047ba61-512e-4899-95ec-2dd4a1862858-kube-api-access-x2sd6\") pod \"swift-ring-rebalance-mwr89\" (UID: \"f047ba61-512e-4899-95ec-2dd4a1862858\") " pod="openstack/swift-ring-rebalance-mwr89" Dec 05 11:28:26 crc kubenswrapper[4728]: I1205 11:28:26.196861 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/f047ba61-512e-4899-95ec-2dd4a1862858-dispersionconf\") pod \"swift-ring-rebalance-mwr89\" (UID: \"f047ba61-512e-4899-95ec-2dd4a1862858\") " pod="openstack/swift-ring-rebalance-mwr89" Dec 05 11:28:26 crc kubenswrapper[4728]: I1205 11:28:26.196970 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/f047ba61-512e-4899-95ec-2dd4a1862858-swiftconf\") pod \"swift-ring-rebalance-mwr89\" (UID: \"f047ba61-512e-4899-95ec-2dd4a1862858\") " pod="openstack/swift-ring-rebalance-mwr89" Dec 05 11:28:26 crc kubenswrapper[4728]: I1205 11:28:26.197062 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f047ba61-512e-4899-95ec-2dd4a1862858-scripts\") pod \"swift-ring-rebalance-mwr89\" (UID: \"f047ba61-512e-4899-95ec-2dd4a1862858\") " pod="openstack/swift-ring-rebalance-mwr89" Dec 05 11:28:26 crc kubenswrapper[4728]: I1205 11:28:26.197131 4728 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f047ba61-512e-4899-95ec-2dd4a1862858-combined-ca-bundle\") pod \"swift-ring-rebalance-mwr89\" (UID: \"f047ba61-512e-4899-95ec-2dd4a1862858\") " pod="openstack/swift-ring-rebalance-mwr89" Dec 05 11:28:26 crc kubenswrapper[4728]: I1205 11:28:26.302680 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/f047ba61-512e-4899-95ec-2dd4a1862858-ring-data-devices\") pod \"swift-ring-rebalance-mwr89\" (UID: \"f047ba61-512e-4899-95ec-2dd4a1862858\") " pod="openstack/swift-ring-rebalance-mwr89" Dec 05 11:28:26 crc kubenswrapper[4728]: I1205 11:28:26.302743 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x2sd6\" (UniqueName: \"kubernetes.io/projected/f047ba61-512e-4899-95ec-2dd4a1862858-kube-api-access-x2sd6\") pod \"swift-ring-rebalance-mwr89\" (UID: \"f047ba61-512e-4899-95ec-2dd4a1862858\") " pod="openstack/swift-ring-rebalance-mwr89" Dec 05 11:28:26 crc kubenswrapper[4728]: I1205 11:28:26.302781 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a64af6ac-e922-435f-bee9-1cc7e7a95f4a-etc-swift\") pod \"swift-storage-0\" (UID: \"a64af6ac-e922-435f-bee9-1cc7e7a95f4a\") " pod="openstack/swift-storage-0" Dec 05 11:28:26 crc kubenswrapper[4728]: I1205 11:28:26.302839 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/f047ba61-512e-4899-95ec-2dd4a1862858-dispersionconf\") pod \"swift-ring-rebalance-mwr89\" (UID: \"f047ba61-512e-4899-95ec-2dd4a1862858\") " pod="openstack/swift-ring-rebalance-mwr89" Dec 05 11:28:26 crc kubenswrapper[4728]: I1205 11:28:26.302867 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/f047ba61-512e-4899-95ec-2dd4a1862858-swiftconf\") pod \"swift-ring-rebalance-mwr89\" (UID: \"f047ba61-512e-4899-95ec-2dd4a1862858\") " pod="openstack/swift-ring-rebalance-mwr89" Dec 05 11:28:26 crc kubenswrapper[4728]: I1205 11:28:26.302900 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f047ba61-512e-4899-95ec-2dd4a1862858-scripts\") pod \"swift-ring-rebalance-mwr89\" (UID: \"f047ba61-512e-4899-95ec-2dd4a1862858\") " pod="openstack/swift-ring-rebalance-mwr89" Dec 05 11:28:26 crc kubenswrapper[4728]: I1205 11:28:26.302928 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f047ba61-512e-4899-95ec-2dd4a1862858-combined-ca-bundle\") pod \"swift-ring-rebalance-mwr89\" (UID: \"f047ba61-512e-4899-95ec-2dd4a1862858\") " pod="openstack/swift-ring-rebalance-mwr89" Dec 05 11:28:26 crc kubenswrapper[4728]: I1205 11:28:26.302994 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/f047ba61-512e-4899-95ec-2dd4a1862858-etc-swift\") pod \"swift-ring-rebalance-mwr89\" (UID: \"f047ba61-512e-4899-95ec-2dd4a1862858\") " pod="openstack/swift-ring-rebalance-mwr89" Dec 05 11:28:26 crc kubenswrapper[4728]: I1205 11:28:26.303586 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: 
\"kubernetes.io/empty-dir/f047ba61-512e-4899-95ec-2dd4a1862858-etc-swift\") pod \"swift-ring-rebalance-mwr89\" (UID: \"f047ba61-512e-4899-95ec-2dd4a1862858\") " pod="openstack/swift-ring-rebalance-mwr89" Dec 05 11:28:26 crc kubenswrapper[4728]: E1205 11:28:26.304104 4728 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 11:28:26 crc kubenswrapper[4728]: E1205 11:28:26.304138 4728 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 11:28:26 crc kubenswrapper[4728]: E1205 11:28:26.304200 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/a64af6ac-e922-435f-bee9-1cc7e7a95f4a-etc-swift podName:a64af6ac-e922-435f-bee9-1cc7e7a95f4a nodeName:}" failed. No retries permitted until 2025-12-05 11:28:27.304178465 +0000 UTC m=+1241.446301248 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/a64af6ac-e922-435f-bee9-1cc7e7a95f4a-etc-swift") pod "swift-storage-0" (UID: "a64af6ac-e922-435f-bee9-1cc7e7a95f4a") : configmap "swift-ring-files" not found Dec 05 11:28:26 crc kubenswrapper[4728]: I1205 11:28:26.304443 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f047ba61-512e-4899-95ec-2dd4a1862858-scripts\") pod \"swift-ring-rebalance-mwr89\" (UID: \"f047ba61-512e-4899-95ec-2dd4a1862858\") " pod="openstack/swift-ring-rebalance-mwr89" Dec 05 11:28:26 crc kubenswrapper[4728]: I1205 11:28:26.304643 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/f047ba61-512e-4899-95ec-2dd4a1862858-ring-data-devices\") pod \"swift-ring-rebalance-mwr89\" (UID: \"f047ba61-512e-4899-95ec-2dd4a1862858\") " pod="openstack/swift-ring-rebalance-mwr89" Dec 05 11:28:26 crc kubenswrapper[4728]: I1205 11:28:26.307286 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/f047ba61-512e-4899-95ec-2dd4a1862858-swiftconf\") pod \"swift-ring-rebalance-mwr89\" (UID: \"f047ba61-512e-4899-95ec-2dd4a1862858\") " pod="openstack/swift-ring-rebalance-mwr89" Dec 05 11:28:26 crc kubenswrapper[4728]: I1205 11:28:26.308048 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/f047ba61-512e-4899-95ec-2dd4a1862858-dispersionconf\") pod \"swift-ring-rebalance-mwr89\" (UID: \"f047ba61-512e-4899-95ec-2dd4a1862858\") " pod="openstack/swift-ring-rebalance-mwr89" Dec 05 11:28:26 crc kubenswrapper[4728]: I1205 11:28:26.318082 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f047ba61-512e-4899-95ec-2dd4a1862858-combined-ca-bundle\") pod \"swift-ring-rebalance-mwr89\" (UID: \"f047ba61-512e-4899-95ec-2dd4a1862858\") " pod="openstack/swift-ring-rebalance-mwr89" Dec 05 11:28:26 crc kubenswrapper[4728]: I1205 11:28:26.321329 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x2sd6\" (UniqueName: \"kubernetes.io/projected/f047ba61-512e-4899-95ec-2dd4a1862858-kube-api-access-x2sd6\") pod \"swift-ring-rebalance-mwr89\" (UID: \"f047ba61-512e-4899-95ec-2dd4a1862858\") " pod="openstack/swift-ring-rebalance-mwr89" Dec 05 11:28:26 crc kubenswrapper[4728]: I1205 11:28:26.344274 4728 util.go:30] 
"No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-mwr89" Dec 05 11:28:26 crc kubenswrapper[4728]: I1205 11:28:26.773273 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-mwr89"] Dec 05 11:28:27 crc kubenswrapper[4728]: I1205 11:28:27.176514 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-mwr89" event={"ID":"f047ba61-512e-4899-95ec-2dd4a1862858","Type":"ContainerStarted","Data":"503436b404382189f73ff9053ea575f5bf615e7345d075936b781573fbb66b7f"} Dec 05 11:28:27 crc kubenswrapper[4728]: I1205 11:28:27.339201 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a64af6ac-e922-435f-bee9-1cc7e7a95f4a-etc-swift\") pod \"swift-storage-0\" (UID: \"a64af6ac-e922-435f-bee9-1cc7e7a95f4a\") " pod="openstack/swift-storage-0" Dec 05 11:28:27 crc kubenswrapper[4728]: E1205 11:28:27.339449 4728 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 11:28:27 crc kubenswrapper[4728]: E1205 11:28:27.339497 4728 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 11:28:27 crc kubenswrapper[4728]: E1205 11:28:27.339595 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/a64af6ac-e922-435f-bee9-1cc7e7a95f4a-etc-swift podName:a64af6ac-e922-435f-bee9-1cc7e7a95f4a nodeName:}" failed. No retries permitted until 2025-12-05 11:28:29.339564829 +0000 UTC m=+1243.481687552 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/a64af6ac-e922-435f-bee9-1cc7e7a95f4a-etc-swift") pod "swift-storage-0" (UID: "a64af6ac-e922-435f-bee9-1cc7e7a95f4a") : configmap "swift-ring-files" not found Dec 05 11:28:28 crc kubenswrapper[4728]: I1205 11:28:28.186839 4728 generic.go:334] "Generic (PLEG): container finished" podID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerID="d8efdde93e15953f6b7d53c9af0274517d1b20f2c87e219dbc82f0145e651995" exitCode=0 Dec 05 11:28:28 crc kubenswrapper[4728]: I1205 11:28:28.186892 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" event={"ID":"95bfa60b-fcb6-4519-abc5-c25fea50921d","Type":"ContainerDied","Data":"d8efdde93e15953f6b7d53c9af0274517d1b20f2c87e219dbc82f0145e651995"} Dec 05 11:28:28 crc kubenswrapper[4728]: I1205 11:28:28.186934 4728 scope.go:117] "RemoveContainer" containerID="6bf852478fb7f0ca84e27842bbcba629a54bb2c604960749055643fd058725b9" Dec 05 11:28:28 crc kubenswrapper[4728]: I1205 11:28:28.189529 4728 generic.go:334] "Generic (PLEG): container finished" podID="41e42f3d-f948-4eb4-99cb-842f4e17c69c" containerID="0490cf24113cf1667977d9e6060029a15dd491d52411b265b412b902db5c3d1e" exitCode=0 Dec 05 11:28:28 crc kubenswrapper[4728]: I1205 11:28:28.189580 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-8qq5x" event={"ID":"41e42f3d-f948-4eb4-99cb-842f4e17c69c","Type":"ContainerDied","Data":"0490cf24113cf1667977d9e6060029a15dd491d52411b265b412b902db5c3d1e"} Dec 05 11:28:29 crc kubenswrapper[4728]: I1205 11:28:29.199294 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-8qq5x" 
event={"ID":"41e42f3d-f948-4eb4-99cb-842f4e17c69c","Type":"ContainerStarted","Data":"20298d372b55710c9d6c93f1d2dc85ecfe33aa32e270c2a1172aa7a7c9d13321"} Dec 05 11:28:29 crc kubenswrapper[4728]: I1205 11:28:29.199714 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-698758b865-8qq5x" Dec 05 11:28:29 crc kubenswrapper[4728]: I1205 11:28:29.203608 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" event={"ID":"95bfa60b-fcb6-4519-abc5-c25fea50921d","Type":"ContainerStarted","Data":"f07ec3293f2cf410293b607034ef0c1092d5f4131c09f2ae85161f9d724cfe26"} Dec 05 11:28:29 crc kubenswrapper[4728]: I1205 11:28:29.224692 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-698758b865-8qq5x" podStartSLOduration=5.224677611 podStartE2EDuration="5.224677611s" podCreationTimestamp="2025-12-05 11:28:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:28:29.221759814 +0000 UTC m=+1243.363882507" watchObservedRunningTime="2025-12-05 11:28:29.224677611 +0000 UTC m=+1243.366800304" Dec 05 11:28:29 crc kubenswrapper[4728]: I1205 11:28:29.375835 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a64af6ac-e922-435f-bee9-1cc7e7a95f4a-etc-swift\") pod \"swift-storage-0\" (UID: \"a64af6ac-e922-435f-bee9-1cc7e7a95f4a\") " pod="openstack/swift-storage-0" Dec 05 11:28:29 crc kubenswrapper[4728]: E1205 11:28:29.376050 4728 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 11:28:29 crc kubenswrapper[4728]: E1205 11:28:29.376083 4728 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 11:28:29 crc kubenswrapper[4728]: E1205 11:28:29.376143 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/a64af6ac-e922-435f-bee9-1cc7e7a95f4a-etc-swift podName:a64af6ac-e922-435f-bee9-1cc7e7a95f4a nodeName:}" failed. No retries permitted until 2025-12-05 11:28:33.376125313 +0000 UTC m=+1247.518248006 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/a64af6ac-e922-435f-bee9-1cc7e7a95f4a-etc-swift") pod "swift-storage-0" (UID: "a64af6ac-e922-435f-bee9-1cc7e7a95f4a") : configmap "swift-ring-files" not found Dec 05 11:28:33 crc kubenswrapper[4728]: I1205 11:28:33.250174 4728 generic.go:334] "Generic (PLEG): container finished" podID="1ffe010f-366a-454a-a8a3-639ed1cf0fdc" containerID="d1fe7cce520a9bd7d7211a892a3d6e4444e5b7752c415881b7288fb3265ef5cf" exitCode=0 Dec 05 11:28:33 crc kubenswrapper[4728]: I1205 11:28:33.250336 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"1ffe010f-366a-454a-a8a3-639ed1cf0fdc","Type":"ContainerDied","Data":"d1fe7cce520a9bd7d7211a892a3d6e4444e5b7752c415881b7288fb3265ef5cf"} Dec 05 11:28:33 crc kubenswrapper[4728]: I1205 11:28:33.255367 4728 generic.go:334] "Generic (PLEG): container finished" podID="4b375e4c-1072-4e6b-be7b-4eea43abf413" containerID="8adfc871f6cc2dd8a3d042f60b6adbb6306b6762f75c01fed28e659895a3180e" exitCode=0 Dec 05 11:28:33 crc kubenswrapper[4728]: I1205 11:28:33.255418 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"4b375e4c-1072-4e6b-be7b-4eea43abf413","Type":"ContainerDied","Data":"8adfc871f6cc2dd8a3d042f60b6adbb6306b6762f75c01fed28e659895a3180e"} Dec 05 11:28:33 crc kubenswrapper[4728]: I1205 11:28:33.444485 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a64af6ac-e922-435f-bee9-1cc7e7a95f4a-etc-swift\") pod \"swift-storage-0\" (UID: \"a64af6ac-e922-435f-bee9-1cc7e7a95f4a\") " pod="openstack/swift-storage-0" Dec 05 11:28:33 crc kubenswrapper[4728]: E1205 11:28:33.444933 4728 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 11:28:33 crc kubenswrapper[4728]: E1205 11:28:33.444960 4728 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 11:28:33 crc kubenswrapper[4728]: E1205 11:28:33.445023 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/a64af6ac-e922-435f-bee9-1cc7e7a95f4a-etc-swift podName:a64af6ac-e922-435f-bee9-1cc7e7a95f4a nodeName:}" failed. No retries permitted until 2025-12-05 11:28:41.445002037 +0000 UTC m=+1255.587124730 (durationBeforeRetry 8s). 
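The durationBeforeRetry values across these repeated failures (500ms, 1s, 2s, 4s, 8s, and later 16s) trace kubelet's exponential backoff for failed volume operations: the delay doubles after each consecutive failure. A minimal Go sketch of that doubling pattern; the cap is an assumption, and kubelet's real bookkeeping (nestedpendingoperations.go) is more involved:

package main

import (
	"errors"
	"fmt"
	"time"
)

// retryWithBackoff retries op, doubling the delay after each failure up to max.
func retryWithBackoff(op func() error, initial, max time.Duration) {
	delay := initial
	for op() != nil {
		fmt.Printf("failed. No retries permitted until now+%v (durationBeforeRetry %v)\n", delay, delay)
		time.Sleep(delay)
		if delay*2 <= max {
			delay *= 2 // double on each consecutive failure
		}
	}
}

func main() {
	attempt := 0
	retryWithBackoff(func() error {
		attempt++
		if attempt <= 6 {
			return errors.New(`configmap "swift-ring-files" not found`)
		}
		return nil // succeeds once the rebalance job has published the rings
	}, 500*time.Millisecond, 2*time.Minute)
	fmt.Println("MountVolume.SetUp succeeded after", attempt, "attempts")
}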
Dec 05 11:28:34 crc kubenswrapper[4728]: I1205 11:28:34.223966 4728 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-74pwl" podUID="ed80e7c1-b5a1-4606-b110-5d205dd122b4" containerName="ovn-controller" probeResult="failure" output=<
Dec 05 11:28:34 crc kubenswrapper[4728]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status
Dec 05 11:28:34 crc kubenswrapper[4728]: >
Dec 05 11:28:34 crc kubenswrapper[4728]: I1205 11:28:34.743951 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-698758b865-8qq5x"
Dec 05 11:28:34 crc kubenswrapper[4728]: I1205 11:28:34.796977 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-mc7sj"]
Dec 05 11:28:34 crc kubenswrapper[4728]: I1205 11:28:34.797252 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-86db49b7ff-mc7sj" podUID="afc4bd20-529c-442f-a1a5-ed1b6fa92717" containerName="dnsmasq-dns" containerID="cri-o://9fdf795d26164881c4d70781394ac0a1d8ffb9f36c0c863c3dfd5f302f30abfb" gracePeriod=10
Dec 05 11:28:36 crc kubenswrapper[4728]: I1205 11:28:36.289584 4728 generic.go:334] "Generic (PLEG): container finished" podID="afc4bd20-529c-442f-a1a5-ed1b6fa92717" containerID="9fdf795d26164881c4d70781394ac0a1d8ffb9f36c0c863c3dfd5f302f30abfb" exitCode=0
Dec 05 11:28:36 crc kubenswrapper[4728]: I1205 11:28:36.289675 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-mc7sj" event={"ID":"afc4bd20-529c-442f-a1a5-ed1b6fa92717","Type":"ContainerDied","Data":"9fdf795d26164881c4d70781394ac0a1d8ffb9f36c0c863c3dfd5f302f30abfb"}
Dec 05 11:28:36 crc kubenswrapper[4728]: I1205 11:28:36.401610 4728 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-86db49b7ff-mc7sj" podUID="afc4bd20-529c-442f-a1a5-ed1b6fa92717" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.110:5353: connect: connection refused"
Dec 05 11:28:39 crc kubenswrapper[4728]: I1205 11:28:39.223564 4728 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-74pwl" podUID="ed80e7c1-b5a1-4606-b110-5d205dd122b4" containerName="ovn-controller" probeResult="failure" output=<
Dec 05 11:28:39 crc kubenswrapper[4728]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status
Dec 05 11:28:39 crc kubenswrapper[4728]: >
Dec 05 11:28:39 crc kubenswrapper[4728]: I1205 11:28:39.287847 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-csgtz"
Dec 05 11:28:39 crc kubenswrapper[4728]: I1205 11:28:39.302312 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-csgtz"
Dec 05 11:28:39 crc kubenswrapper[4728]: I1205 11:28:39.645099 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-74pwl-config-8t2mv"]
Dec 05 11:28:39 crc kubenswrapper[4728]: I1205 11:28:39.646966 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-74pwl-config-8t2mv"
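The dnsmasq readiness failures above ("connection refused", then "i/o timeout" once the sandbox is torn down) come from a TCP readiness probe, which succeeds if a connect to the pod's port completes. A short Go sketch of that check; the address and timeout are taken from the log for illustration only:

package main

import (
	"fmt"
	"net"
	"time"
)

// tcpProbe is essentially all a TCP readiness probe does: dial and close.
// Both "connection refused" and "i/o timeout" surface here as dial errors.
func tcpProbe(addr string, timeout time.Duration) error {
	conn, err := net.DialTimeout("tcp", addr, timeout)
	if err != nil {
		return err
	}
	return conn.Close() // a successful connect is all the probe needs
}

func main() {
	if err := tcpProbe("10.217.0.110:5353", time.Second); err != nil {
		// A failed readiness probe removes the pod from service endpoints;
		// unlike a liveness failure, it never restarts the container.
		fmt.Printf("probeResult=%q output=%q\n", "failure", err.Error())
		return
	}
	fmt.Printf("probeResult=%q\n", "success")
}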
Dec 05 11:28:39 crc kubenswrapper[4728]: I1205 11:28:39.650669 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts"
Dec 05 11:28:39 crc kubenswrapper[4728]: I1205 11:28:39.655457 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-74pwl-config-8t2mv"]
Dec 05 11:28:39 crc kubenswrapper[4728]: I1205 11:28:39.773941 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wsk96\" (UniqueName: \"kubernetes.io/projected/fe4457a5-8949-4d08-9396-2b5ae6794bf1-kube-api-access-wsk96\") pod \"ovn-controller-74pwl-config-8t2mv\" (UID: \"fe4457a5-8949-4d08-9396-2b5ae6794bf1\") " pod="openstack/ovn-controller-74pwl-config-8t2mv"
Dec 05 11:28:39 crc kubenswrapper[4728]: I1205 11:28:39.774047 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/fe4457a5-8949-4d08-9396-2b5ae6794bf1-var-run\") pod \"ovn-controller-74pwl-config-8t2mv\" (UID: \"fe4457a5-8949-4d08-9396-2b5ae6794bf1\") " pod="openstack/ovn-controller-74pwl-config-8t2mv"
Dec 05 11:28:39 crc kubenswrapper[4728]: I1205 11:28:39.774073 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/fe4457a5-8949-4d08-9396-2b5ae6794bf1-var-run-ovn\") pod \"ovn-controller-74pwl-config-8t2mv\" (UID: \"fe4457a5-8949-4d08-9396-2b5ae6794bf1\") " pod="openstack/ovn-controller-74pwl-config-8t2mv"
Dec 05 11:28:39 crc kubenswrapper[4728]: I1205 11:28:39.774170 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fe4457a5-8949-4d08-9396-2b5ae6794bf1-scripts\") pod \"ovn-controller-74pwl-config-8t2mv\" (UID: \"fe4457a5-8949-4d08-9396-2b5ae6794bf1\") " pod="openstack/ovn-controller-74pwl-config-8t2mv"
Dec 05 11:28:39 crc kubenswrapper[4728]: I1205 11:28:39.774212 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/fe4457a5-8949-4d08-9396-2b5ae6794bf1-additional-scripts\") pod \"ovn-controller-74pwl-config-8t2mv\" (UID: \"fe4457a5-8949-4d08-9396-2b5ae6794bf1\") " pod="openstack/ovn-controller-74pwl-config-8t2mv"
Dec 05 11:28:39 crc kubenswrapper[4728]: I1205 11:28:39.774230 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/fe4457a5-8949-4d08-9396-2b5ae6794bf1-var-log-ovn\") pod \"ovn-controller-74pwl-config-8t2mv\" (UID: \"fe4457a5-8949-4d08-9396-2b5ae6794bf1\") " pod="openstack/ovn-controller-74pwl-config-8t2mv"
Dec 05 11:28:39 crc kubenswrapper[4728]: I1205 11:28:39.876234 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wsk96\" (UniqueName: \"kubernetes.io/projected/fe4457a5-8949-4d08-9396-2b5ae6794bf1-kube-api-access-wsk96\") pod \"ovn-controller-74pwl-config-8t2mv\" (UID: \"fe4457a5-8949-4d08-9396-2b5ae6794bf1\") " pod="openstack/ovn-controller-74pwl-config-8t2mv"
Dec 05 11:28:39 crc kubenswrapper[4728]: I1205 11:28:39.876356 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/fe4457a5-8949-4d08-9396-2b5ae6794bf1-var-run\") pod \"ovn-controller-74pwl-config-8t2mv\" (UID: \"fe4457a5-8949-4d08-9396-2b5ae6794bf1\") " pod="openstack/ovn-controller-74pwl-config-8t2mv"
Dec 05 11:28:39 crc kubenswrapper[4728]: I1205 11:28:39.876390 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/fe4457a5-8949-4d08-9396-2b5ae6794bf1-var-run-ovn\") pod \"ovn-controller-74pwl-config-8t2mv\" (UID: \"fe4457a5-8949-4d08-9396-2b5ae6794bf1\") " pod="openstack/ovn-controller-74pwl-config-8t2mv"
Dec 05 11:28:39 crc kubenswrapper[4728]: I1205 11:28:39.876477 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fe4457a5-8949-4d08-9396-2b5ae6794bf1-scripts\") pod \"ovn-controller-74pwl-config-8t2mv\" (UID: \"fe4457a5-8949-4d08-9396-2b5ae6794bf1\") " pod="openstack/ovn-controller-74pwl-config-8t2mv"
Dec 05 11:28:39 crc kubenswrapper[4728]: I1205 11:28:39.876540 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/fe4457a5-8949-4d08-9396-2b5ae6794bf1-additional-scripts\") pod \"ovn-controller-74pwl-config-8t2mv\" (UID: \"fe4457a5-8949-4d08-9396-2b5ae6794bf1\") " pod="openstack/ovn-controller-74pwl-config-8t2mv"
Dec 05 11:28:39 crc kubenswrapper[4728]: I1205 11:28:39.876571 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/fe4457a5-8949-4d08-9396-2b5ae6794bf1-var-log-ovn\") pod \"ovn-controller-74pwl-config-8t2mv\" (UID: \"fe4457a5-8949-4d08-9396-2b5ae6794bf1\") " pod="openstack/ovn-controller-74pwl-config-8t2mv"
Dec 05 11:28:39 crc kubenswrapper[4728]: I1205 11:28:39.877059 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/fe4457a5-8949-4d08-9396-2b5ae6794bf1-var-log-ovn\") pod \"ovn-controller-74pwl-config-8t2mv\" (UID: \"fe4457a5-8949-4d08-9396-2b5ae6794bf1\") " pod="openstack/ovn-controller-74pwl-config-8t2mv"
Dec 05 11:28:39 crc kubenswrapper[4728]: I1205 11:28:39.877156 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/fe4457a5-8949-4d08-9396-2b5ae6794bf1-var-run-ovn\") pod \"ovn-controller-74pwl-config-8t2mv\" (UID: \"fe4457a5-8949-4d08-9396-2b5ae6794bf1\") " pod="openstack/ovn-controller-74pwl-config-8t2mv"
Dec 05 11:28:39 crc kubenswrapper[4728]: I1205 11:28:39.877223 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/fe4457a5-8949-4d08-9396-2b5ae6794bf1-var-run\") pod \"ovn-controller-74pwl-config-8t2mv\" (UID: \"fe4457a5-8949-4d08-9396-2b5ae6794bf1\") " pod="openstack/ovn-controller-74pwl-config-8t2mv"
Dec 05 11:28:39 crc kubenswrapper[4728]: I1205 11:28:39.878279 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/fe4457a5-8949-4d08-9396-2b5ae6794bf1-additional-scripts\") pod \"ovn-controller-74pwl-config-8t2mv\" (UID: \"fe4457a5-8949-4d08-9396-2b5ae6794bf1\") " pod="openstack/ovn-controller-74pwl-config-8t2mv"
Dec 05 11:28:39 crc kubenswrapper[4728]: I1205 11:28:39.880263 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fe4457a5-8949-4d08-9396-2b5ae6794bf1-scripts\") pod \"ovn-controller-74pwl-config-8t2mv\" (UID: \"fe4457a5-8949-4d08-9396-2b5ae6794bf1\") " pod="openstack/ovn-controller-74pwl-config-8t2mv"
Dec 05 11:28:39 crc kubenswrapper[4728]: I1205 11:28:39.899793 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wsk96\" (UniqueName: \"kubernetes.io/projected/fe4457a5-8949-4d08-9396-2b5ae6794bf1-kube-api-access-wsk96\") pod \"ovn-controller-74pwl-config-8t2mv\" (UID: \"fe4457a5-8949-4d08-9396-2b5ae6794bf1\") " pod="openstack/ovn-controller-74pwl-config-8t2mv"
Dec 05 11:28:39 crc kubenswrapper[4728]: I1205 11:28:39.975567 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-74pwl-config-8t2mv"
Dec 05 11:28:41 crc kubenswrapper[4728]: I1205 11:28:41.515241 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a64af6ac-e922-435f-bee9-1cc7e7a95f4a-etc-swift\") pod \"swift-storage-0\" (UID: \"a64af6ac-e922-435f-bee9-1cc7e7a95f4a\") " pod="openstack/swift-storage-0"
Dec 05 11:28:41 crc kubenswrapper[4728]: E1205 11:28:41.515452 4728 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Dec 05 11:28:41 crc kubenswrapper[4728]: E1205 11:28:41.515488 4728 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Dec 05 11:28:41 crc kubenswrapper[4728]: E1205 11:28:41.515550 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/a64af6ac-e922-435f-bee9-1cc7e7a95f4a-etc-swift podName:a64af6ac-e922-435f-bee9-1cc7e7a95f4a nodeName:}" failed. No retries permitted until 2025-12-05 11:28:57.515525363 +0000 UTC m=+1271.657648056 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/a64af6ac-e922-435f-bee9-1cc7e7a95f4a-etc-swift") pod "swift-storage-0" (UID: "a64af6ac-e922-435f-bee9-1cc7e7a95f4a") : configmap "swift-ring-files" not found
Dec 05 11:28:44 crc kubenswrapper[4728]: I1205 11:28:44.224983 4728 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-74pwl" podUID="ed80e7c1-b5a1-4606-b110-5d205dd122b4" containerName="ovn-controller" probeResult="failure" output=<
Dec 05 11:28:44 crc kubenswrapper[4728]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status
Dec 05 11:28:44 crc kubenswrapper[4728]: >
Dec 05 11:28:46 crc kubenswrapper[4728]: I1205 11:28:46.401303 4728 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-86db49b7ff-mc7sj" podUID="afc4bd20-529c-442f-a1a5-ed1b6fa92717" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.110:5353: i/o timeout"
Dec 05 11:28:46 crc kubenswrapper[4728]: E1205 11:28:46.923490 4728 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-glance-api:current-podified"
Dec 05 11:28:46 crc kubenswrapper[4728]: E1205 11:28:46.923653 4728 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:glance-db-sync,Image:quay.io/podified-antelope-centos9/openstack-glance-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/glance/glance.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-fzzfz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42415,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42415,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-db-sync-8fpw9_openstack(30ec6fee-e0fe-471a-a673-7856319a8dd8): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 05 11:28:46 crc kubenswrapper[4728]: E1205 11:28:46.924825 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/glance-db-sync-8fpw9" podUID="30ec6fee-e0fe-471a-a673-7856319a8dd8"
Dec 05 11:28:47 crc kubenswrapper[4728]: I1205 11:28:47.186984 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-mc7sj"
Dec 05 11:28:47 crc kubenswrapper[4728]: I1205 11:28:47.343753 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwd2\" (UniqueName: \"kubernetes.io/projected/afc4bd20-529c-442f-a1a5-ed1b6fa92717-kube-api-access-jkwd2\") pod \"afc4bd20-529c-442f-a1a5-ed1b6fa92717\" (UID: \"afc4bd20-529c-442f-a1a5-ed1b6fa92717\") "
Dec 05 11:28:47 crc kubenswrapper[4728]: I1205 11:28:47.344089 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/afc4bd20-529c-442f-a1a5-ed1b6fa92717-ovsdbserver-nb\") pod \"afc4bd20-529c-442f-a1a5-ed1b6fa92717\" (UID: \"afc4bd20-529c-442f-a1a5-ed1b6fa92717\") "
Dec 05 11:28:47 crc kubenswrapper[4728]: I1205 11:28:47.344306 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/afc4bd20-529c-442f-a1a5-ed1b6fa92717-config\") pod \"afc4bd20-529c-442f-a1a5-ed1b6fa92717\" (UID: \"afc4bd20-529c-442f-a1a5-ed1b6fa92717\") "
Dec 05 11:28:47 crc kubenswrapper[4728]: I1205 11:28:47.344388 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/afc4bd20-529c-442f-a1a5-ed1b6fa92717-dns-svc\") pod \"afc4bd20-529c-442f-a1a5-ed1b6fa92717\" (UID: \"afc4bd20-529c-442f-a1a5-ed1b6fa92717\") "
Dec 05 11:28:47 crc kubenswrapper[4728]: I1205 11:28:47.344450 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/afc4bd20-529c-442f-a1a5-ed1b6fa92717-ovsdbserver-sb\") pod \"afc4bd20-529c-442f-a1a5-ed1b6fa92717\" (UID: \"afc4bd20-529c-442f-a1a5-ed1b6fa92717\") "
Dec 05 11:28:47 crc kubenswrapper[4728]: I1205 11:28:47.349230 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/afc4bd20-529c-442f-a1a5-ed1b6fa92717-kube-api-access-jkwd2" (OuterVolumeSpecName: "kube-api-access-jkwd2") pod "afc4bd20-529c-442f-a1a5-ed1b6fa92717" (UID: "afc4bd20-529c-442f-a1a5-ed1b6fa92717"). InnerVolumeSpecName "kube-api-access-jkwd2". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:28:47 crc kubenswrapper[4728]: I1205 11:28:47.384493 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"1ffe010f-366a-454a-a8a3-639ed1cf0fdc","Type":"ContainerStarted","Data":"8696f921f7cda96413571926919ec77e1d181e158bf83564c780b49fbebb8859"} Dec 05 11:28:47 crc kubenswrapper[4728]: I1205 11:28:47.387389 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:28:47 crc kubenswrapper[4728]: I1205 11:28:47.388369 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-mc7sj" event={"ID":"afc4bd20-529c-442f-a1a5-ed1b6fa92717","Type":"ContainerDied","Data":"d820b1da3b0c42bc8986bcbcd591cf93eac65fbdb23d0e760f12e21206bcfd8f"} Dec 05 11:28:47 crc kubenswrapper[4728]: I1205 11:28:47.388428 4728 scope.go:117] "RemoveContainer" containerID="9fdf795d26164881c4d70781394ac0a1d8ffb9f36c0c863c3dfd5f302f30abfb" Dec 05 11:28:47 crc kubenswrapper[4728]: I1205 11:28:47.388563 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-mc7sj" Dec 05 11:28:47 crc kubenswrapper[4728]: I1205 11:28:47.389335 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/afc4bd20-529c-442f-a1a5-ed1b6fa92717-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "afc4bd20-529c-442f-a1a5-ed1b6fa92717" (UID: "afc4bd20-529c-442f-a1a5-ed1b6fa92717"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:28:47 crc kubenswrapper[4728]: I1205 11:28:47.390785 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/afc4bd20-529c-442f-a1a5-ed1b6fa92717-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "afc4bd20-529c-442f-a1a5-ed1b6fa92717" (UID: "afc4bd20-529c-442f-a1a5-ed1b6fa92717"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:28:47 crc kubenswrapper[4728]: I1205 11:28:47.395398 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-mwr89" event={"ID":"f047ba61-512e-4899-95ec-2dd4a1862858","Type":"ContainerStarted","Data":"1cf0f99bc6aea84d1d7a5c8e42c93cbb0867b3eed2646e7ba495c08aaf318a4a"} Dec 05 11:28:47 crc kubenswrapper[4728]: I1205 11:28:47.397832 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"4b375e4c-1072-4e6b-be7b-4eea43abf413","Type":"ContainerStarted","Data":"021780e8c029d2fc151cf9af1b1f211f7020acbe296320997c1da9ab53130d77"} Dec 05 11:28:47 crc kubenswrapper[4728]: I1205 11:28:47.398297 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 05 11:28:47 crc kubenswrapper[4728]: E1205 11:28:47.406471 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/afc4bd20-529c-442f-a1a5-ed1b6fa92717-config podName:afc4bd20-529c-442f-a1a5-ed1b6fa92717 nodeName:}" failed. No retries permitted until 2025-12-05 11:28:47.90644384 +0000 UTC m=+1262.048566533 (durationBeforeRetry 500ms). 
Error: error cleaning subPath mounts for volume "config" (UniqueName: "kubernetes.io/configmap/afc4bd20-529c-442f-a1a5-ed1b6fa92717-config") pod "afc4bd20-529c-442f-a1a5-ed1b6fa92717" (UID: "afc4bd20-529c-442f-a1a5-ed1b6fa92717") : error deleting /var/lib/kubelet/pods/afc4bd20-529c-442f-a1a5-ed1b6fa92717/volume-subpaths: remove /var/lib/kubelet/pods/afc4bd20-529c-442f-a1a5-ed1b6fa92717/volume-subpaths: no such file or directory Dec 05 11:28:47 crc kubenswrapper[4728]: I1205 11:28:47.406959 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/afc4bd20-529c-442f-a1a5-ed1b6fa92717-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "afc4bd20-529c-442f-a1a5-ed1b6fa92717" (UID: "afc4bd20-529c-442f-a1a5-ed1b6fa92717"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:28:47 crc kubenswrapper[4728]: I1205 11:28:47.416081 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=51.326528404 podStartE2EDuration="1m20.416059204s" podCreationTimestamp="2025-12-05 11:27:27 +0000 UTC" firstStartedPulling="2025-12-05 11:27:29.854475344 +0000 UTC m=+1183.996598027" lastFinishedPulling="2025-12-05 11:27:58.944006124 +0000 UTC m=+1213.086128827" observedRunningTime="2025-12-05 11:28:47.412413648 +0000 UTC m=+1261.554536361" watchObservedRunningTime="2025-12-05 11:28:47.416059204 +0000 UTC m=+1261.558181917" Dec 05 11:28:47 crc kubenswrapper[4728]: I1205 11:28:47.418256 4728 scope.go:117] "RemoveContainer" containerID="30aa2e03cbdca6f39887cf9186d8515fc6067d522e3457211ac275c6bfa4234b" Dec 05 11:28:47 crc kubenswrapper[4728]: E1205 11:28:47.419927 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-glance-api:current-podified\\\"\"" pod="openstack/glance-db-sync-8fpw9" podUID="30ec6fee-e0fe-471a-a673-7856319a8dd8" Dec 05 11:28:47 crc kubenswrapper[4728]: I1205 11:28:47.432506 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-74pwl-config-8t2mv"] Dec 05 11:28:47 crc kubenswrapper[4728]: W1205 11:28:47.435911 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfe4457a5_8949_4d08_9396_2b5ae6794bf1.slice/crio-cce3315f9004488446c1d2e3a6bad69929d036fb12f0f64bf7bd65f19a8fca4f WatchSource:0}: Error finding container cce3315f9004488446c1d2e3a6bad69929d036fb12f0f64bf7bd65f19a8fca4f: Status 404 returned error can't find the container with id cce3315f9004488446c1d2e3a6bad69929d036fb12f0f64bf7bd65f19a8fca4f Dec 05 11:28:47 crc kubenswrapper[4728]: I1205 11:28:47.446571 4728 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/afc4bd20-529c-442f-a1a5-ed1b6fa92717-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 11:28:47 crc kubenswrapper[4728]: I1205 11:28:47.446627 4728 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/afc4bd20-529c-442f-a1a5-ed1b6fa92717-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 11:28:47 crc kubenswrapper[4728]: I1205 11:28:47.446643 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwd2\" (UniqueName: \"kubernetes.io/projected/afc4bd20-529c-442f-a1a5-ed1b6fa92717-kube-api-access-jkwd2\") on node \"crc\" DevicePath 
\"\"" Dec 05 11:28:47 crc kubenswrapper[4728]: I1205 11:28:47.446656 4728 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/afc4bd20-529c-442f-a1a5-ed1b6fa92717-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 11:28:47 crc kubenswrapper[4728]: I1205 11:28:47.461554 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=52.80483585 podStartE2EDuration="1m20.461535149s" podCreationTimestamp="2025-12-05 11:27:27 +0000 UTC" firstStartedPulling="2025-12-05 11:27:29.757088909 +0000 UTC m=+1183.899211602" lastFinishedPulling="2025-12-05 11:27:57.413788208 +0000 UTC m=+1211.555910901" observedRunningTime="2025-12-05 11:28:47.455000846 +0000 UTC m=+1261.597123549" watchObservedRunningTime="2025-12-05 11:28:47.461535149 +0000 UTC m=+1261.603657842" Dec 05 11:28:47 crc kubenswrapper[4728]: I1205 11:28:47.496410 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-mwr89" podStartSLOduration=1.326237956 podStartE2EDuration="21.496392512s" podCreationTimestamp="2025-12-05 11:28:26 +0000 UTC" firstStartedPulling="2025-12-05 11:28:26.792860508 +0000 UTC m=+1240.934983211" lastFinishedPulling="2025-12-05 11:28:46.963015044 +0000 UTC m=+1261.105137767" observedRunningTime="2025-12-05 11:28:47.494165243 +0000 UTC m=+1261.636287946" watchObservedRunningTime="2025-12-05 11:28:47.496392512 +0000 UTC m=+1261.638515205" Dec 05 11:28:47 crc kubenswrapper[4728]: I1205 11:28:47.957074 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/afc4bd20-529c-442f-a1a5-ed1b6fa92717-config\") pod \"afc4bd20-529c-442f-a1a5-ed1b6fa92717\" (UID: \"afc4bd20-529c-442f-a1a5-ed1b6fa92717\") " Dec 05 11:28:47 crc kubenswrapper[4728]: I1205 11:28:47.957830 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/afc4bd20-529c-442f-a1a5-ed1b6fa92717-config" (OuterVolumeSpecName: "config") pod "afc4bd20-529c-442f-a1a5-ed1b6fa92717" (UID: "afc4bd20-529c-442f-a1a5-ed1b6fa92717"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:28:47 crc kubenswrapper[4728]: I1205 11:28:47.958307 4728 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/afc4bd20-529c-442f-a1a5-ed1b6fa92717-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:28:48 crc kubenswrapper[4728]: I1205 11:28:48.048186 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-mc7sj"] Dec 05 11:28:48 crc kubenswrapper[4728]: I1205 11:28:48.055632 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-mc7sj"] Dec 05 11:28:48 crc kubenswrapper[4728]: I1205 11:28:48.363666 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="afc4bd20-529c-442f-a1a5-ed1b6fa92717" path="/var/lib/kubelet/pods/afc4bd20-529c-442f-a1a5-ed1b6fa92717/volumes" Dec 05 11:28:48 crc kubenswrapper[4728]: I1205 11:28:48.407296 4728 generic.go:334] "Generic (PLEG): container finished" podID="fe4457a5-8949-4d08-9396-2b5ae6794bf1" containerID="47ef8f7fa2d3a2d5c869bccc2658eed49d4bf5dc79b6d2dca95cda5443c3da52" exitCode=0 Dec 05 11:28:48 crc kubenswrapper[4728]: I1205 11:28:48.407345 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-74pwl-config-8t2mv" event={"ID":"fe4457a5-8949-4d08-9396-2b5ae6794bf1","Type":"ContainerDied","Data":"47ef8f7fa2d3a2d5c869bccc2658eed49d4bf5dc79b6d2dca95cda5443c3da52"} Dec 05 11:28:48 crc kubenswrapper[4728]: I1205 11:28:48.407398 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-74pwl-config-8t2mv" event={"ID":"fe4457a5-8949-4d08-9396-2b5ae6794bf1","Type":"ContainerStarted","Data":"cce3315f9004488446c1d2e3a6bad69929d036fb12f0f64bf7bd65f19a8fca4f"} Dec 05 11:28:49 crc kubenswrapper[4728]: I1205 11:28:49.221880 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-74pwl" Dec 05 11:28:49 crc kubenswrapper[4728]: I1205 11:28:49.725148 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-74pwl-config-8t2mv" Dec 05 11:28:49 crc kubenswrapper[4728]: I1205 11:28:49.786449 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/fe4457a5-8949-4d08-9396-2b5ae6794bf1-var-log-ovn\") pod \"fe4457a5-8949-4d08-9396-2b5ae6794bf1\" (UID: \"fe4457a5-8949-4d08-9396-2b5ae6794bf1\") " Dec 05 11:28:49 crc kubenswrapper[4728]: I1205 11:28:49.786537 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/fe4457a5-8949-4d08-9396-2b5ae6794bf1-additional-scripts\") pod \"fe4457a5-8949-4d08-9396-2b5ae6794bf1\" (UID: \"fe4457a5-8949-4d08-9396-2b5ae6794bf1\") " Dec 05 11:28:49 crc kubenswrapper[4728]: I1205 11:28:49.786582 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/fe4457a5-8949-4d08-9396-2b5ae6794bf1-var-run\") pod \"fe4457a5-8949-4d08-9396-2b5ae6794bf1\" (UID: \"fe4457a5-8949-4d08-9396-2b5ae6794bf1\") " Dec 05 11:28:49 crc kubenswrapper[4728]: I1205 11:28:49.786581 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fe4457a5-8949-4d08-9396-2b5ae6794bf1-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "fe4457a5-8949-4d08-9396-2b5ae6794bf1" (UID: "fe4457a5-8949-4d08-9396-2b5ae6794bf1"). 
InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:28:49 crc kubenswrapper[4728]: I1205 11:28:49.786666 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fe4457a5-8949-4d08-9396-2b5ae6794bf1-scripts\") pod \"fe4457a5-8949-4d08-9396-2b5ae6794bf1\" (UID: \"fe4457a5-8949-4d08-9396-2b5ae6794bf1\") " Dec 05 11:28:49 crc kubenswrapper[4728]: I1205 11:28:49.786709 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fe4457a5-8949-4d08-9396-2b5ae6794bf1-var-run" (OuterVolumeSpecName: "var-run") pod "fe4457a5-8949-4d08-9396-2b5ae6794bf1" (UID: "fe4457a5-8949-4d08-9396-2b5ae6794bf1"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:28:49 crc kubenswrapper[4728]: I1205 11:28:49.786764 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wsk96\" (UniqueName: \"kubernetes.io/projected/fe4457a5-8949-4d08-9396-2b5ae6794bf1-kube-api-access-wsk96\") pod \"fe4457a5-8949-4d08-9396-2b5ae6794bf1\" (UID: \"fe4457a5-8949-4d08-9396-2b5ae6794bf1\") " Dec 05 11:28:49 crc kubenswrapper[4728]: I1205 11:28:49.786850 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/fe4457a5-8949-4d08-9396-2b5ae6794bf1-var-run-ovn\") pod \"fe4457a5-8949-4d08-9396-2b5ae6794bf1\" (UID: \"fe4457a5-8949-4d08-9396-2b5ae6794bf1\") " Dec 05 11:28:49 crc kubenswrapper[4728]: I1205 11:28:49.787026 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fe4457a5-8949-4d08-9396-2b5ae6794bf1-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "fe4457a5-8949-4d08-9396-2b5ae6794bf1" (UID: "fe4457a5-8949-4d08-9396-2b5ae6794bf1"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:28:49 crc kubenswrapper[4728]: I1205 11:28:49.787495 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fe4457a5-8949-4d08-9396-2b5ae6794bf1-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "fe4457a5-8949-4d08-9396-2b5ae6794bf1" (UID: "fe4457a5-8949-4d08-9396-2b5ae6794bf1"). InnerVolumeSpecName "additional-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:28:49 crc kubenswrapper[4728]: I1205 11:28:49.787595 4728 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/fe4457a5-8949-4d08-9396-2b5ae6794bf1-var-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 05 11:28:49 crc kubenswrapper[4728]: I1205 11:28:49.787624 4728 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/fe4457a5-8949-4d08-9396-2b5ae6794bf1-var-log-ovn\") on node \"crc\" DevicePath \"\"" Dec 05 11:28:49 crc kubenswrapper[4728]: I1205 11:28:49.787636 4728 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/fe4457a5-8949-4d08-9396-2b5ae6794bf1-additional-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:28:49 crc kubenswrapper[4728]: I1205 11:28:49.787650 4728 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/fe4457a5-8949-4d08-9396-2b5ae6794bf1-var-run\") on node \"crc\" DevicePath \"\"" Dec 05 11:28:49 crc kubenswrapper[4728]: I1205 11:28:49.787851 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fe4457a5-8949-4d08-9396-2b5ae6794bf1-scripts" (OuterVolumeSpecName: "scripts") pod "fe4457a5-8949-4d08-9396-2b5ae6794bf1" (UID: "fe4457a5-8949-4d08-9396-2b5ae6794bf1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:28:49 crc kubenswrapper[4728]: I1205 11:28:49.792679 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe4457a5-8949-4d08-9396-2b5ae6794bf1-kube-api-access-wsk96" (OuterVolumeSpecName: "kube-api-access-wsk96") pod "fe4457a5-8949-4d08-9396-2b5ae6794bf1" (UID: "fe4457a5-8949-4d08-9396-2b5ae6794bf1"). InnerVolumeSpecName "kube-api-access-wsk96". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:28:49 crc kubenswrapper[4728]: I1205 11:28:49.889270 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wsk96\" (UniqueName: \"kubernetes.io/projected/fe4457a5-8949-4d08-9396-2b5ae6794bf1-kube-api-access-wsk96\") on node \"crc\" DevicePath \"\"" Dec 05 11:28:49 crc kubenswrapper[4728]: I1205 11:28:49.889310 4728 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fe4457a5-8949-4d08-9396-2b5ae6794bf1-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:28:50 crc kubenswrapper[4728]: I1205 11:28:50.434977 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-74pwl-config-8t2mv" event={"ID":"fe4457a5-8949-4d08-9396-2b5ae6794bf1","Type":"ContainerDied","Data":"cce3315f9004488446c1d2e3a6bad69929d036fb12f0f64bf7bd65f19a8fca4f"} Dec 05 11:28:50 crc kubenswrapper[4728]: I1205 11:28:50.435021 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cce3315f9004488446c1d2e3a6bad69929d036fb12f0f64bf7bd65f19a8fca4f" Dec 05 11:28:50 crc kubenswrapper[4728]: I1205 11:28:50.435064 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-74pwl-config-8t2mv" Dec 05 11:28:50 crc kubenswrapper[4728]: I1205 11:28:50.831251 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-74pwl-config-8t2mv"] Dec 05 11:28:50 crc kubenswrapper[4728]: I1205 11:28:50.841626 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-74pwl-config-8t2mv"] Dec 05 11:28:50 crc kubenswrapper[4728]: I1205 11:28:50.930324 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-74pwl-config-mtw86"] Dec 05 11:28:50 crc kubenswrapper[4728]: E1205 11:28:50.930664 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="afc4bd20-529c-442f-a1a5-ed1b6fa92717" containerName="init" Dec 05 11:28:50 crc kubenswrapper[4728]: I1205 11:28:50.930697 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="afc4bd20-529c-442f-a1a5-ed1b6fa92717" containerName="init" Dec 05 11:28:50 crc kubenswrapper[4728]: E1205 11:28:50.930714 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe4457a5-8949-4d08-9396-2b5ae6794bf1" containerName="ovn-config" Dec 05 11:28:50 crc kubenswrapper[4728]: I1205 11:28:50.930720 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe4457a5-8949-4d08-9396-2b5ae6794bf1" containerName="ovn-config" Dec 05 11:28:50 crc kubenswrapper[4728]: E1205 11:28:50.930728 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="afc4bd20-529c-442f-a1a5-ed1b6fa92717" containerName="dnsmasq-dns" Dec 05 11:28:50 crc kubenswrapper[4728]: I1205 11:28:50.930735 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="afc4bd20-529c-442f-a1a5-ed1b6fa92717" containerName="dnsmasq-dns" Dec 05 11:28:50 crc kubenswrapper[4728]: I1205 11:28:50.930911 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="afc4bd20-529c-442f-a1a5-ed1b6fa92717" containerName="dnsmasq-dns" Dec 05 11:28:50 crc kubenswrapper[4728]: I1205 11:28:50.930922 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe4457a5-8949-4d08-9396-2b5ae6794bf1" containerName="ovn-config" Dec 05 11:28:50 crc kubenswrapper[4728]: I1205 11:28:50.931402 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-74pwl-config-mtw86" Dec 05 11:28:50 crc kubenswrapper[4728]: I1205 11:28:50.933340 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Dec 05 11:28:50 crc kubenswrapper[4728]: I1205 11:28:50.944032 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-74pwl-config-mtw86"] Dec 05 11:28:51 crc kubenswrapper[4728]: I1205 11:28:51.010330 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/cb6badae-947e-49a2-83a8-1927e95b5379-var-run-ovn\") pod \"ovn-controller-74pwl-config-mtw86\" (UID: \"cb6badae-947e-49a2-83a8-1927e95b5379\") " pod="openstack/ovn-controller-74pwl-config-mtw86" Dec 05 11:28:51 crc kubenswrapper[4728]: I1205 11:28:51.010388 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l4ksz\" (UniqueName: \"kubernetes.io/projected/cb6badae-947e-49a2-83a8-1927e95b5379-kube-api-access-l4ksz\") pod \"ovn-controller-74pwl-config-mtw86\" (UID: \"cb6badae-947e-49a2-83a8-1927e95b5379\") " pod="openstack/ovn-controller-74pwl-config-mtw86" Dec 05 11:28:51 crc kubenswrapper[4728]: I1205 11:28:51.010510 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/cb6badae-947e-49a2-83a8-1927e95b5379-var-run\") pod \"ovn-controller-74pwl-config-mtw86\" (UID: \"cb6badae-947e-49a2-83a8-1927e95b5379\") " pod="openstack/ovn-controller-74pwl-config-mtw86" Dec 05 11:28:51 crc kubenswrapper[4728]: I1205 11:28:51.010585 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/cb6badae-947e-49a2-83a8-1927e95b5379-var-log-ovn\") pod \"ovn-controller-74pwl-config-mtw86\" (UID: \"cb6badae-947e-49a2-83a8-1927e95b5379\") " pod="openstack/ovn-controller-74pwl-config-mtw86" Dec 05 11:28:51 crc kubenswrapper[4728]: I1205 11:28:51.010623 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/cb6badae-947e-49a2-83a8-1927e95b5379-additional-scripts\") pod \"ovn-controller-74pwl-config-mtw86\" (UID: \"cb6badae-947e-49a2-83a8-1927e95b5379\") " pod="openstack/ovn-controller-74pwl-config-mtw86" Dec 05 11:28:51 crc kubenswrapper[4728]: I1205 11:28:51.010662 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/cb6badae-947e-49a2-83a8-1927e95b5379-scripts\") pod \"ovn-controller-74pwl-config-mtw86\" (UID: \"cb6badae-947e-49a2-83a8-1927e95b5379\") " pod="openstack/ovn-controller-74pwl-config-mtw86" Dec 05 11:28:51 crc kubenswrapper[4728]: I1205 11:28:51.112036 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/cb6badae-947e-49a2-83a8-1927e95b5379-var-log-ovn\") pod \"ovn-controller-74pwl-config-mtw86\" (UID: \"cb6badae-947e-49a2-83a8-1927e95b5379\") " pod="openstack/ovn-controller-74pwl-config-mtw86" Dec 05 11:28:51 crc kubenswrapper[4728]: I1205 11:28:51.112087 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: 
\"kubernetes.io/configmap/cb6badae-947e-49a2-83a8-1927e95b5379-additional-scripts\") pod \"ovn-controller-74pwl-config-mtw86\" (UID: \"cb6badae-947e-49a2-83a8-1927e95b5379\") " pod="openstack/ovn-controller-74pwl-config-mtw86" Dec 05 11:28:51 crc kubenswrapper[4728]: I1205 11:28:51.112113 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/cb6badae-947e-49a2-83a8-1927e95b5379-scripts\") pod \"ovn-controller-74pwl-config-mtw86\" (UID: \"cb6badae-947e-49a2-83a8-1927e95b5379\") " pod="openstack/ovn-controller-74pwl-config-mtw86" Dec 05 11:28:51 crc kubenswrapper[4728]: I1205 11:28:51.112145 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/cb6badae-947e-49a2-83a8-1927e95b5379-var-run-ovn\") pod \"ovn-controller-74pwl-config-mtw86\" (UID: \"cb6badae-947e-49a2-83a8-1927e95b5379\") " pod="openstack/ovn-controller-74pwl-config-mtw86" Dec 05 11:28:51 crc kubenswrapper[4728]: I1205 11:28:51.112167 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l4ksz\" (UniqueName: \"kubernetes.io/projected/cb6badae-947e-49a2-83a8-1927e95b5379-kube-api-access-l4ksz\") pod \"ovn-controller-74pwl-config-mtw86\" (UID: \"cb6badae-947e-49a2-83a8-1927e95b5379\") " pod="openstack/ovn-controller-74pwl-config-mtw86" Dec 05 11:28:51 crc kubenswrapper[4728]: I1205 11:28:51.112240 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/cb6badae-947e-49a2-83a8-1927e95b5379-var-run\") pod \"ovn-controller-74pwl-config-mtw86\" (UID: \"cb6badae-947e-49a2-83a8-1927e95b5379\") " pod="openstack/ovn-controller-74pwl-config-mtw86" Dec 05 11:28:51 crc kubenswrapper[4728]: I1205 11:28:51.112443 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/cb6badae-947e-49a2-83a8-1927e95b5379-var-run\") pod \"ovn-controller-74pwl-config-mtw86\" (UID: \"cb6badae-947e-49a2-83a8-1927e95b5379\") " pod="openstack/ovn-controller-74pwl-config-mtw86" Dec 05 11:28:51 crc kubenswrapper[4728]: I1205 11:28:51.112445 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/cb6badae-947e-49a2-83a8-1927e95b5379-var-log-ovn\") pod \"ovn-controller-74pwl-config-mtw86\" (UID: \"cb6badae-947e-49a2-83a8-1927e95b5379\") " pod="openstack/ovn-controller-74pwl-config-mtw86" Dec 05 11:28:51 crc kubenswrapper[4728]: I1205 11:28:51.112497 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/cb6badae-947e-49a2-83a8-1927e95b5379-var-run-ovn\") pod \"ovn-controller-74pwl-config-mtw86\" (UID: \"cb6badae-947e-49a2-83a8-1927e95b5379\") " pod="openstack/ovn-controller-74pwl-config-mtw86" Dec 05 11:28:51 crc kubenswrapper[4728]: I1205 11:28:51.113020 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/cb6badae-947e-49a2-83a8-1927e95b5379-additional-scripts\") pod \"ovn-controller-74pwl-config-mtw86\" (UID: \"cb6badae-947e-49a2-83a8-1927e95b5379\") " pod="openstack/ovn-controller-74pwl-config-mtw86" Dec 05 11:28:51 crc kubenswrapper[4728]: I1205 11:28:51.113982 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/cb6badae-947e-49a2-83a8-1927e95b5379-scripts\") pod \"ovn-controller-74pwl-config-mtw86\" (UID: \"cb6badae-947e-49a2-83a8-1927e95b5379\") " pod="openstack/ovn-controller-74pwl-config-mtw86" Dec 05 11:28:51 crc kubenswrapper[4728]: I1205 11:28:51.142580 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l4ksz\" (UniqueName: \"kubernetes.io/projected/cb6badae-947e-49a2-83a8-1927e95b5379-kube-api-access-l4ksz\") pod \"ovn-controller-74pwl-config-mtw86\" (UID: \"cb6badae-947e-49a2-83a8-1927e95b5379\") " pod="openstack/ovn-controller-74pwl-config-mtw86" Dec 05 11:28:51 crc kubenswrapper[4728]: I1205 11:28:51.312786 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-74pwl-config-mtw86" Dec 05 11:28:51 crc kubenswrapper[4728]: I1205 11:28:51.402919 4728 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-86db49b7ff-mc7sj" podUID="afc4bd20-529c-442f-a1a5-ed1b6fa92717" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.110:5353: i/o timeout" Dec 05 11:28:51 crc kubenswrapper[4728]: I1205 11:28:51.582162 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-74pwl-config-mtw86"] Dec 05 11:28:52 crc kubenswrapper[4728]: I1205 11:28:52.363543 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fe4457a5-8949-4d08-9396-2b5ae6794bf1" path="/var/lib/kubelet/pods/fe4457a5-8949-4d08-9396-2b5ae6794bf1/volumes" Dec 05 11:28:52 crc kubenswrapper[4728]: I1205 11:28:52.455696 4728 generic.go:334] "Generic (PLEG): container finished" podID="cb6badae-947e-49a2-83a8-1927e95b5379" containerID="2708625e0fb8f19339b47c8fd505026f6227bbeb03e13b88299420c7f5747f22" exitCode=0 Dec 05 11:28:52 crc kubenswrapper[4728]: I1205 11:28:52.455741 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-74pwl-config-mtw86" event={"ID":"cb6badae-947e-49a2-83a8-1927e95b5379","Type":"ContainerDied","Data":"2708625e0fb8f19339b47c8fd505026f6227bbeb03e13b88299420c7f5747f22"} Dec 05 11:28:52 crc kubenswrapper[4728]: I1205 11:28:52.455770 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-74pwl-config-mtw86" event={"ID":"cb6badae-947e-49a2-83a8-1927e95b5379","Type":"ContainerStarted","Data":"c8b97126f7563f7f55ec05f8fe1d2a3338f7f09b19ef5f2c8829e4e1f7465c72"} Dec 05 11:28:53 crc kubenswrapper[4728]: I1205 11:28:53.796470 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-74pwl-config-mtw86" Dec 05 11:28:53 crc kubenswrapper[4728]: I1205 11:28:53.957560 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/cb6badae-947e-49a2-83a8-1927e95b5379-var-run-ovn\") pod \"cb6badae-947e-49a2-83a8-1927e95b5379\" (UID: \"cb6badae-947e-49a2-83a8-1927e95b5379\") " Dec 05 11:28:53 crc kubenswrapper[4728]: I1205 11:28:53.957664 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/cb6badae-947e-49a2-83a8-1927e95b5379-scripts\") pod \"cb6badae-947e-49a2-83a8-1927e95b5379\" (UID: \"cb6badae-947e-49a2-83a8-1927e95b5379\") " Dec 05 11:28:53 crc kubenswrapper[4728]: I1205 11:28:53.957689 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cb6badae-947e-49a2-83a8-1927e95b5379-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "cb6badae-947e-49a2-83a8-1927e95b5379" (UID: "cb6badae-947e-49a2-83a8-1927e95b5379"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:28:53 crc kubenswrapper[4728]: I1205 11:28:53.957755 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/cb6badae-947e-49a2-83a8-1927e95b5379-var-log-ovn\") pod \"cb6badae-947e-49a2-83a8-1927e95b5379\" (UID: \"cb6badae-947e-49a2-83a8-1927e95b5379\") " Dec 05 11:28:53 crc kubenswrapper[4728]: I1205 11:28:53.957855 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l4ksz\" (UniqueName: \"kubernetes.io/projected/cb6badae-947e-49a2-83a8-1927e95b5379-kube-api-access-l4ksz\") pod \"cb6badae-947e-49a2-83a8-1927e95b5379\" (UID: \"cb6badae-947e-49a2-83a8-1927e95b5379\") " Dec 05 11:28:53 crc kubenswrapper[4728]: I1205 11:28:53.957888 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/cb6badae-947e-49a2-83a8-1927e95b5379-var-run\") pod \"cb6badae-947e-49a2-83a8-1927e95b5379\" (UID: \"cb6badae-947e-49a2-83a8-1927e95b5379\") " Dec 05 11:28:53 crc kubenswrapper[4728]: I1205 11:28:53.957931 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cb6badae-947e-49a2-83a8-1927e95b5379-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "cb6badae-947e-49a2-83a8-1927e95b5379" (UID: "cb6badae-947e-49a2-83a8-1927e95b5379"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:28:53 crc kubenswrapper[4728]: I1205 11:28:53.957960 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/cb6badae-947e-49a2-83a8-1927e95b5379-additional-scripts\") pod \"cb6badae-947e-49a2-83a8-1927e95b5379\" (UID: \"cb6badae-947e-49a2-83a8-1927e95b5379\") " Dec 05 11:28:53 crc kubenswrapper[4728]: I1205 11:28:53.957987 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cb6badae-947e-49a2-83a8-1927e95b5379-var-run" (OuterVolumeSpecName: "var-run") pod "cb6badae-947e-49a2-83a8-1927e95b5379" (UID: "cb6badae-947e-49a2-83a8-1927e95b5379"). InnerVolumeSpecName "var-run". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:28:53 crc kubenswrapper[4728]: I1205 11:28:53.958539 4728 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/cb6badae-947e-49a2-83a8-1927e95b5379-var-run\") on node \"crc\" DevicePath \"\"" Dec 05 11:28:53 crc kubenswrapper[4728]: I1205 11:28:53.958569 4728 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/cb6badae-947e-49a2-83a8-1927e95b5379-var-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 05 11:28:53 crc kubenswrapper[4728]: I1205 11:28:53.958586 4728 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/cb6badae-947e-49a2-83a8-1927e95b5379-var-log-ovn\") on node \"crc\" DevicePath \"\"" Dec 05 11:28:53 crc kubenswrapper[4728]: I1205 11:28:53.958600 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cb6badae-947e-49a2-83a8-1927e95b5379-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "cb6badae-947e-49a2-83a8-1927e95b5379" (UID: "cb6badae-947e-49a2-83a8-1927e95b5379"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:28:53 crc kubenswrapper[4728]: I1205 11:28:53.958813 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cb6badae-947e-49a2-83a8-1927e95b5379-scripts" (OuterVolumeSpecName: "scripts") pod "cb6badae-947e-49a2-83a8-1927e95b5379" (UID: "cb6badae-947e-49a2-83a8-1927e95b5379"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:28:53 crc kubenswrapper[4728]: I1205 11:28:53.977068 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cb6badae-947e-49a2-83a8-1927e95b5379-kube-api-access-l4ksz" (OuterVolumeSpecName: "kube-api-access-l4ksz") pod "cb6badae-947e-49a2-83a8-1927e95b5379" (UID: "cb6badae-947e-49a2-83a8-1927e95b5379"). InnerVolumeSpecName "kube-api-access-l4ksz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:28:54 crc kubenswrapper[4728]: I1205 11:28:54.060455 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l4ksz\" (UniqueName: \"kubernetes.io/projected/cb6badae-947e-49a2-83a8-1927e95b5379-kube-api-access-l4ksz\") on node \"crc\" DevicePath \"\"" Dec 05 11:28:54 crc kubenswrapper[4728]: I1205 11:28:54.060496 4728 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/cb6badae-947e-49a2-83a8-1927e95b5379-additional-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:28:54 crc kubenswrapper[4728]: I1205 11:28:54.060515 4728 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/cb6badae-947e-49a2-83a8-1927e95b5379-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:28:54 crc kubenswrapper[4728]: I1205 11:28:54.472841 4728 generic.go:334] "Generic (PLEG): container finished" podID="f047ba61-512e-4899-95ec-2dd4a1862858" containerID="1cf0f99bc6aea84d1d7a5c8e42c93cbb0867b3eed2646e7ba495c08aaf318a4a" exitCode=0 Dec 05 11:28:54 crc kubenswrapper[4728]: I1205 11:28:54.472937 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-mwr89" event={"ID":"f047ba61-512e-4899-95ec-2dd4a1862858","Type":"ContainerDied","Data":"1cf0f99bc6aea84d1d7a5c8e42c93cbb0867b3eed2646e7ba495c08aaf318a4a"} Dec 05 11:28:54 crc kubenswrapper[4728]: I1205 11:28:54.474627 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-74pwl-config-mtw86" event={"ID":"cb6badae-947e-49a2-83a8-1927e95b5379","Type":"ContainerDied","Data":"c8b97126f7563f7f55ec05f8fe1d2a3338f7f09b19ef5f2c8829e4e1f7465c72"} Dec 05 11:28:54 crc kubenswrapper[4728]: I1205 11:28:54.474650 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c8b97126f7563f7f55ec05f8fe1d2a3338f7f09b19ef5f2c8829e4e1f7465c72" Dec 05 11:28:54 crc kubenswrapper[4728]: I1205 11:28:54.474718 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-74pwl-config-mtw86" Dec 05 11:28:54 crc kubenswrapper[4728]: I1205 11:28:54.899581 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-74pwl-config-mtw86"] Dec 05 11:28:54 crc kubenswrapper[4728]: I1205 11:28:54.908875 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-74pwl-config-mtw86"] Dec 05 11:28:55 crc kubenswrapper[4728]: I1205 11:28:55.848393 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-mwr89" Dec 05 11:28:55 crc kubenswrapper[4728]: I1205 11:28:55.891508 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/f047ba61-512e-4899-95ec-2dd4a1862858-ring-data-devices\") pod \"f047ba61-512e-4899-95ec-2dd4a1862858\" (UID: \"f047ba61-512e-4899-95ec-2dd4a1862858\") " Dec 05 11:28:55 crc kubenswrapper[4728]: I1205 11:28:55.891571 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f047ba61-512e-4899-95ec-2dd4a1862858-combined-ca-bundle\") pod \"f047ba61-512e-4899-95ec-2dd4a1862858\" (UID: \"f047ba61-512e-4899-95ec-2dd4a1862858\") " Dec 05 11:28:55 crc kubenswrapper[4728]: I1205 11:28:55.891597 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/f047ba61-512e-4899-95ec-2dd4a1862858-etc-swift\") pod \"f047ba61-512e-4899-95ec-2dd4a1862858\" (UID: \"f047ba61-512e-4899-95ec-2dd4a1862858\") " Dec 05 11:28:55 crc kubenswrapper[4728]: I1205 11:28:55.891613 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/f047ba61-512e-4899-95ec-2dd4a1862858-dispersionconf\") pod \"f047ba61-512e-4899-95ec-2dd4a1862858\" (UID: \"f047ba61-512e-4899-95ec-2dd4a1862858\") " Dec 05 11:28:55 crc kubenswrapper[4728]: I1205 11:28:55.891644 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2sd6\" (UniqueName: \"kubernetes.io/projected/f047ba61-512e-4899-95ec-2dd4a1862858-kube-api-access-x2sd6\") pod \"f047ba61-512e-4899-95ec-2dd4a1862858\" (UID: \"f047ba61-512e-4899-95ec-2dd4a1862858\") " Dec 05 11:28:55 crc kubenswrapper[4728]: I1205 11:28:55.892652 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f047ba61-512e-4899-95ec-2dd4a1862858-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "f047ba61-512e-4899-95ec-2dd4a1862858" (UID: "f047ba61-512e-4899-95ec-2dd4a1862858"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:28:55 crc kubenswrapper[4728]: I1205 11:28:55.892685 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f047ba61-512e-4899-95ec-2dd4a1862858-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "f047ba61-512e-4899-95ec-2dd4a1862858" (UID: "f047ba61-512e-4899-95ec-2dd4a1862858"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:28:55 crc kubenswrapper[4728]: I1205 11:28:55.992587 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f047ba61-512e-4899-95ec-2dd4a1862858-scripts\") pod \"f047ba61-512e-4899-95ec-2dd4a1862858\" (UID: \"f047ba61-512e-4899-95ec-2dd4a1862858\") " Dec 05 11:28:55 crc kubenswrapper[4728]: I1205 11:28:55.992646 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/f047ba61-512e-4899-95ec-2dd4a1862858-swiftconf\") pod \"f047ba61-512e-4899-95ec-2dd4a1862858\" (UID: \"f047ba61-512e-4899-95ec-2dd4a1862858\") " Dec 05 11:28:55 crc kubenswrapper[4728]: I1205 11:28:55.993215 4728 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/f047ba61-512e-4899-95ec-2dd4a1862858-ring-data-devices\") on node \"crc\" DevicePath \"\"" Dec 05 11:28:55 crc kubenswrapper[4728]: I1205 11:28:55.993228 4728 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/f047ba61-512e-4899-95ec-2dd4a1862858-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 05 11:28:56 crc kubenswrapper[4728]: I1205 11:28:56.350086 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f047ba61-512e-4899-95ec-2dd4a1862858-kube-api-access-x2sd6" (OuterVolumeSpecName: "kube-api-access-x2sd6") pod "f047ba61-512e-4899-95ec-2dd4a1862858" (UID: "f047ba61-512e-4899-95ec-2dd4a1862858"). InnerVolumeSpecName "kube-api-access-x2sd6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:28:56 crc kubenswrapper[4728]: I1205 11:28:56.354098 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f047ba61-512e-4899-95ec-2dd4a1862858-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "f047ba61-512e-4899-95ec-2dd4a1862858" (UID: "f047ba61-512e-4899-95ec-2dd4a1862858"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:28:56 crc kubenswrapper[4728]: E1205 11:28:56.354288 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f047ba61-512e-4899-95ec-2dd4a1862858-swiftconf podName:f047ba61-512e-4899-95ec-2dd4a1862858 nodeName:}" failed. No retries permitted until 2025-12-05 11:28:56.854251863 +0000 UTC m=+1270.996374636 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "swiftconf" (UniqueName: "kubernetes.io/secret/f047ba61-512e-4899-95ec-2dd4a1862858-swiftconf") pod "f047ba61-512e-4899-95ec-2dd4a1862858" (UID: "f047ba61-512e-4899-95ec-2dd4a1862858") : error deleting /var/lib/kubelet/pods/f047ba61-512e-4899-95ec-2dd4a1862858/volume-subpaths: remove /var/lib/kubelet/pods/f047ba61-512e-4899-95ec-2dd4a1862858/volume-subpaths: no such file or directory Dec 05 11:28:56 crc kubenswrapper[4728]: I1205 11:28:56.354948 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f047ba61-512e-4899-95ec-2dd4a1862858-scripts" (OuterVolumeSpecName: "scripts") pod "f047ba61-512e-4899-95ec-2dd4a1862858" (UID: "f047ba61-512e-4899-95ec-2dd4a1862858"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:28:56 crc kubenswrapper[4728]: I1205 11:28:56.358070 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f047ba61-512e-4899-95ec-2dd4a1862858-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f047ba61-512e-4899-95ec-2dd4a1862858" (UID: "f047ba61-512e-4899-95ec-2dd4a1862858"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:28:56 crc kubenswrapper[4728]: I1205 11:28:56.370423 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cb6badae-947e-49a2-83a8-1927e95b5379" path="/var/lib/kubelet/pods/cb6badae-947e-49a2-83a8-1927e95b5379/volumes" Dec 05 11:28:56 crc kubenswrapper[4728]: I1205 11:28:56.400569 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2sd6\" (UniqueName: \"kubernetes.io/projected/f047ba61-512e-4899-95ec-2dd4a1862858-kube-api-access-x2sd6\") on node \"crc\" DevicePath \"\"" Dec 05 11:28:56 crc kubenswrapper[4728]: I1205 11:28:56.400613 4728 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f047ba61-512e-4899-95ec-2dd4a1862858-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:28:56 crc kubenswrapper[4728]: I1205 11:28:56.400633 4728 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f047ba61-512e-4899-95ec-2dd4a1862858-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:28:56 crc kubenswrapper[4728]: I1205 11:28:56.400650 4728 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/f047ba61-512e-4899-95ec-2dd4a1862858-dispersionconf\") on node \"crc\" DevicePath \"\"" Dec 05 11:28:56 crc kubenswrapper[4728]: I1205 11:28:56.491705 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-mwr89" event={"ID":"f047ba61-512e-4899-95ec-2dd4a1862858","Type":"ContainerDied","Data":"503436b404382189f73ff9053ea575f5bf615e7345d075936b781573fbb66b7f"} Dec 05 11:28:56 crc kubenswrapper[4728]: I1205 11:28:56.491749 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="503436b404382189f73ff9053ea575f5bf615e7345d075936b781573fbb66b7f" Dec 05 11:28:56 crc kubenswrapper[4728]: I1205 11:28:56.491755 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-mwr89" Dec 05 11:28:56 crc kubenswrapper[4728]: I1205 11:28:56.915188 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/f047ba61-512e-4899-95ec-2dd4a1862858-swiftconf\") pod \"f047ba61-512e-4899-95ec-2dd4a1862858\" (UID: \"f047ba61-512e-4899-95ec-2dd4a1862858\") " Dec 05 11:28:56 crc kubenswrapper[4728]: I1205 11:28:56.918550 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f047ba61-512e-4899-95ec-2dd4a1862858-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "f047ba61-512e-4899-95ec-2dd4a1862858" (UID: "f047ba61-512e-4899-95ec-2dd4a1862858"). InnerVolumeSpecName "swiftconf". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:28:57 crc kubenswrapper[4728]: I1205 11:28:57.016716 4728 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/f047ba61-512e-4899-95ec-2dd4a1862858-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 05 11:28:57 crc kubenswrapper[4728]: I1205 11:28:57.524094 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a64af6ac-e922-435f-bee9-1cc7e7a95f4a-etc-swift\") pod \"swift-storage-0\" (UID: \"a64af6ac-e922-435f-bee9-1cc7e7a95f4a\") " pod="openstack/swift-storage-0" Dec 05 11:28:57 crc kubenswrapper[4728]: I1205 11:28:57.529770 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a64af6ac-e922-435f-bee9-1cc7e7a95f4a-etc-swift\") pod \"swift-storage-0\" (UID: \"a64af6ac-e922-435f-bee9-1cc7e7a95f4a\") " pod="openstack/swift-storage-0" Dec 05 11:28:57 crc kubenswrapper[4728]: I1205 11:28:57.649083 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Dec 05 11:28:58 crc kubenswrapper[4728]: I1205 11:28:58.238577 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 05 11:28:58 crc kubenswrapper[4728]: W1205 11:28:58.241097 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda64af6ac_e922_435f_bee9_1cc7e7a95f4a.slice/crio-5bd098cbfcc9a34a9e24a579f439e5a9ed456ca99a157a48a8d1f71f56fa159b WatchSource:0}: Error finding container 5bd098cbfcc9a34a9e24a579f439e5a9ed456ca99a157a48a8d1f71f56fa159b: Status 404 returned error can't find the container with id 5bd098cbfcc9a34a9e24a579f439e5a9ed456ca99a157a48a8d1f71f56fa159b Dec 05 11:28:58 crc kubenswrapper[4728]: I1205 11:28:58.510905 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a64af6ac-e922-435f-bee9-1cc7e7a95f4a","Type":"ContainerStarted","Data":"5bd098cbfcc9a34a9e24a579f439e5a9ed456ca99a157a48a8d1f71f56fa159b"} Dec 05 11:28:59 crc kubenswrapper[4728]: I1205 11:28:59.074033 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 05 11:28:59 crc kubenswrapper[4728]: I1205 11:28:59.350268 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:28:59 crc kubenswrapper[4728]: I1205 11:28:59.485171 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-214d-account-create-update-lpk9z"] Dec 05 11:28:59 crc kubenswrapper[4728]: E1205 11:28:59.485555 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f047ba61-512e-4899-95ec-2dd4a1862858" containerName="swift-ring-rebalance" Dec 05 11:28:59 crc kubenswrapper[4728]: I1205 11:28:59.485573 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="f047ba61-512e-4899-95ec-2dd4a1862858" containerName="swift-ring-rebalance" Dec 05 11:28:59 crc kubenswrapper[4728]: E1205 11:28:59.485588 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb6badae-947e-49a2-83a8-1927e95b5379" containerName="ovn-config" Dec 05 11:28:59 crc kubenswrapper[4728]: I1205 11:28:59.485597 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb6badae-947e-49a2-83a8-1927e95b5379" containerName="ovn-config" Dec 05 11:28:59 crc kubenswrapper[4728]: I1205 11:28:59.485738 4728 
memory_manager.go:354] "RemoveStaleState removing state" podUID="cb6badae-947e-49a2-83a8-1927e95b5379" containerName="ovn-config" Dec 05 11:28:59 crc kubenswrapper[4728]: I1205 11:28:59.485752 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="f047ba61-512e-4899-95ec-2dd4a1862858" containerName="swift-ring-rebalance" Dec 05 11:28:59 crc kubenswrapper[4728]: I1205 11:28:59.486292 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-214d-account-create-update-lpk9z" Dec 05 11:28:59 crc kubenswrapper[4728]: I1205 11:28:59.489633 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-db-secret" Dec 05 11:28:59 crc kubenswrapper[4728]: I1205 11:28:59.493934 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-qgcwk"] Dec 05 11:28:59 crc kubenswrapper[4728]: I1205 11:28:59.495030 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-qgcwk" Dec 05 11:28:59 crc kubenswrapper[4728]: I1205 11:28:59.507398 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-214d-account-create-update-lpk9z"] Dec 05 11:28:59 crc kubenswrapper[4728]: I1205 11:28:59.516865 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-qgcwk"] Dec 05 11:28:59 crc kubenswrapper[4728]: I1205 11:28:59.523768 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a64af6ac-e922-435f-bee9-1cc7e7a95f4a","Type":"ContainerStarted","Data":"4f9394ad977045fc6f2fab66247bba0fe5da571ef2fc9a7f675eb0a2cfa19e2e"} Dec 05 11:28:59 crc kubenswrapper[4728]: I1205 11:28:59.581985 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-db-create-cblx8"] Dec 05 11:28:59 crc kubenswrapper[4728]: I1205 11:28:59.583143 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-create-cblx8" Dec 05 11:28:59 crc kubenswrapper[4728]: I1205 11:28:59.652874 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-create-cblx8"] Dec 05 11:28:59 crc kubenswrapper[4728]: I1205 11:28:59.669312 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/23d1273f-c19e-48d7-b792-0a6db00bc94d-operator-scripts\") pod \"cinder-db-create-qgcwk\" (UID: \"23d1273f-c19e-48d7-b792-0a6db00bc94d\") " pod="openstack/cinder-db-create-qgcwk" Dec 05 11:28:59 crc kubenswrapper[4728]: I1205 11:28:59.669354 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/72c64530-79c6-4b13-a18f-70c0c1405d8f-operator-scripts\") pod \"manila-214d-account-create-update-lpk9z\" (UID: \"72c64530-79c6-4b13-a18f-70c0c1405d8f\") " pod="openstack/manila-214d-account-create-update-lpk9z" Dec 05 11:28:59 crc kubenswrapper[4728]: I1205 11:28:59.669387 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-npqqd\" (UniqueName: \"kubernetes.io/projected/72c64530-79c6-4b13-a18f-70c0c1405d8f-kube-api-access-npqqd\") pod \"manila-214d-account-create-update-lpk9z\" (UID: \"72c64530-79c6-4b13-a18f-70c0c1405d8f\") " pod="openstack/manila-214d-account-create-update-lpk9z" Dec 05 11:28:59 crc kubenswrapper[4728]: I1205 11:28:59.669408 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h4m8b\" (UniqueName: \"kubernetes.io/projected/23d1273f-c19e-48d7-b792-0a6db00bc94d-kube-api-access-h4m8b\") pod \"cinder-db-create-qgcwk\" (UID: \"23d1273f-c19e-48d7-b792-0a6db00bc94d\") " pod="openstack/cinder-db-create-qgcwk" Dec 05 11:28:59 crc kubenswrapper[4728]: I1205 11:28:59.722858 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-lzfv7"] Dec 05 11:28:59 crc kubenswrapper[4728]: I1205 11:28:59.731312 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-lzfv7" Dec 05 11:28:59 crc kubenswrapper[4728]: I1205 11:28:59.765166 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-lzfv7"] Dec 05 11:28:59 crc kubenswrapper[4728]: I1205 11:28:59.771385 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/72c64530-79c6-4b13-a18f-70c0c1405d8f-operator-scripts\") pod \"manila-214d-account-create-update-lpk9z\" (UID: \"72c64530-79c6-4b13-a18f-70c0c1405d8f\") " pod="openstack/manila-214d-account-create-update-lpk9z" Dec 05 11:28:59 crc kubenswrapper[4728]: I1205 11:28:59.771426 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1077e45f-8397-4377-a4e4-6dea1e8f16cb-operator-scripts\") pod \"manila-db-create-cblx8\" (UID: \"1077e45f-8397-4377-a4e4-6dea1e8f16cb\") " pod="openstack/manila-db-create-cblx8" Dec 05 11:28:59 crc kubenswrapper[4728]: I1205 11:28:59.771447 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/23d1273f-c19e-48d7-b792-0a6db00bc94d-operator-scripts\") pod \"cinder-db-create-qgcwk\" (UID: \"23d1273f-c19e-48d7-b792-0a6db00bc94d\") " pod="openstack/cinder-db-create-qgcwk" Dec 05 11:28:59 crc kubenswrapper[4728]: I1205 11:28:59.771483 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-npqqd\" (UniqueName: \"kubernetes.io/projected/72c64530-79c6-4b13-a18f-70c0c1405d8f-kube-api-access-npqqd\") pod \"manila-214d-account-create-update-lpk9z\" (UID: \"72c64530-79c6-4b13-a18f-70c0c1405d8f\") " pod="openstack/manila-214d-account-create-update-lpk9z" Dec 05 11:28:59 crc kubenswrapper[4728]: I1205 11:28:59.771508 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h4m8b\" (UniqueName: \"kubernetes.io/projected/23d1273f-c19e-48d7-b792-0a6db00bc94d-kube-api-access-h4m8b\") pod \"cinder-db-create-qgcwk\" (UID: \"23d1273f-c19e-48d7-b792-0a6db00bc94d\") " pod="openstack/cinder-db-create-qgcwk" Dec 05 11:28:59 crc kubenswrapper[4728]: I1205 11:28:59.771557 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4kgj4\" (UniqueName: \"kubernetes.io/projected/1077e45f-8397-4377-a4e4-6dea1e8f16cb-kube-api-access-4kgj4\") pod \"manila-db-create-cblx8\" (UID: \"1077e45f-8397-4377-a4e4-6dea1e8f16cb\") " pod="openstack/manila-db-create-cblx8" Dec 05 11:28:59 crc kubenswrapper[4728]: I1205 11:28:59.777872 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/23d1273f-c19e-48d7-b792-0a6db00bc94d-operator-scripts\") pod \"cinder-db-create-qgcwk\" (UID: \"23d1273f-c19e-48d7-b792-0a6db00bc94d\") " pod="openstack/cinder-db-create-qgcwk" Dec 05 11:28:59 crc kubenswrapper[4728]: I1205 11:28:59.783773 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/72c64530-79c6-4b13-a18f-70c0c1405d8f-operator-scripts\") pod \"manila-214d-account-create-update-lpk9z\" (UID: \"72c64530-79c6-4b13-a18f-70c0c1405d8f\") " pod="openstack/manila-214d-account-create-update-lpk9z" Dec 05 11:28:59 crc kubenswrapper[4728]: I1205 11:28:59.800426 4728 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"kube-api-access-npqqd\" (UniqueName: \"kubernetes.io/projected/72c64530-79c6-4b13-a18f-70c0c1405d8f-kube-api-access-npqqd\") pod \"manila-214d-account-create-update-lpk9z\" (UID: \"72c64530-79c6-4b13-a18f-70c0c1405d8f\") " pod="openstack/manila-214d-account-create-update-lpk9z" Dec 05 11:28:59 crc kubenswrapper[4728]: I1205 11:28:59.802131 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-09b6-account-create-update-j5949"] Dec 05 11:28:59 crc kubenswrapper[4728]: I1205 11:28:59.803203 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-09b6-account-create-update-j5949" Dec 05 11:28:59 crc kubenswrapper[4728]: I1205 11:28:59.805359 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Dec 05 11:28:59 crc kubenswrapper[4728]: I1205 11:28:59.811991 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h4m8b\" (UniqueName: \"kubernetes.io/projected/23d1273f-c19e-48d7-b792-0a6db00bc94d-kube-api-access-h4m8b\") pod \"cinder-db-create-qgcwk\" (UID: \"23d1273f-c19e-48d7-b792-0a6db00bc94d\") " pod="openstack/cinder-db-create-qgcwk" Dec 05 11:28:59 crc kubenswrapper[4728]: I1205 11:28:59.814956 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-09b6-account-create-update-j5949"] Dec 05 11:28:59 crc kubenswrapper[4728]: I1205 11:28:59.818025 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-214d-account-create-update-lpk9z" Dec 05 11:28:59 crc kubenswrapper[4728]: I1205 11:28:59.833211 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-qgcwk" Dec 05 11:28:59 crc kubenswrapper[4728]: I1205 11:28:59.872846 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-8304-account-create-update-5bb5c"] Dec 05 11:28:59 crc kubenswrapper[4728]: I1205 11:28:59.874164 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-8304-account-create-update-5bb5c" Dec 05 11:28:59 crc kubenswrapper[4728]: I1205 11:28:59.874234 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4kgj4\" (UniqueName: \"kubernetes.io/projected/1077e45f-8397-4377-a4e4-6dea1e8f16cb-kube-api-access-4kgj4\") pod \"manila-db-create-cblx8\" (UID: \"1077e45f-8397-4377-a4e4-6dea1e8f16cb\") " pod="openstack/manila-db-create-cblx8" Dec 05 11:28:59 crc kubenswrapper[4728]: I1205 11:28:59.874342 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b96bd60f-bf85-4263-9e18-c367e742c780-operator-scripts\") pod \"barbican-db-create-lzfv7\" (UID: \"b96bd60f-bf85-4263-9e18-c367e742c780\") " pod="openstack/barbican-db-create-lzfv7" Dec 05 11:28:59 crc kubenswrapper[4728]: I1205 11:28:59.874395 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z85h6\" (UniqueName: \"kubernetes.io/projected/b96bd60f-bf85-4263-9e18-c367e742c780-kube-api-access-z85h6\") pod \"barbican-db-create-lzfv7\" (UID: \"b96bd60f-bf85-4263-9e18-c367e742c780\") " pod="openstack/barbican-db-create-lzfv7" Dec 05 11:28:59 crc kubenswrapper[4728]: I1205 11:28:59.874456 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1077e45f-8397-4377-a4e4-6dea1e8f16cb-operator-scripts\") pod \"manila-db-create-cblx8\" (UID: \"1077e45f-8397-4377-a4e4-6dea1e8f16cb\") " pod="openstack/manila-db-create-cblx8" Dec 05 11:28:59 crc kubenswrapper[4728]: I1205 11:28:59.876578 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1077e45f-8397-4377-a4e4-6dea1e8f16cb-operator-scripts\") pod \"manila-db-create-cblx8\" (UID: \"1077e45f-8397-4377-a4e4-6dea1e8f16cb\") " pod="openstack/manila-db-create-cblx8" Dec 05 11:28:59 crc kubenswrapper[4728]: I1205 11:28:59.879693 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Dec 05 11:28:59 crc kubenswrapper[4728]: I1205 11:28:59.895761 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-8304-account-create-update-5bb5c"] Dec 05 11:28:59 crc kubenswrapper[4728]: I1205 11:28:59.897560 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4kgj4\" (UniqueName: \"kubernetes.io/projected/1077e45f-8397-4377-a4e4-6dea1e8f16cb-kube-api-access-4kgj4\") pod \"manila-db-create-cblx8\" (UID: \"1077e45f-8397-4377-a4e4-6dea1e8f16cb\") " pod="openstack/manila-db-create-cblx8" Dec 05 11:28:59 crc kubenswrapper[4728]: I1205 11:28:59.956689 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-create-cblx8" Dec 05 11:28:59 crc kubenswrapper[4728]: I1205 11:28:59.978269 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z85h6\" (UniqueName: \"kubernetes.io/projected/b96bd60f-bf85-4263-9e18-c367e742c780-kube-api-access-z85h6\") pod \"barbican-db-create-lzfv7\" (UID: \"b96bd60f-bf85-4263-9e18-c367e742c780\") " pod="openstack/barbican-db-create-lzfv7" Dec 05 11:28:59 crc kubenswrapper[4728]: I1205 11:28:59.978341 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z9p96\" (UniqueName: \"kubernetes.io/projected/c98ef602-49f4-4256-8a2d-b6a27ccaf903-kube-api-access-z9p96\") pod \"barbican-8304-account-create-update-5bb5c\" (UID: \"c98ef602-49f4-4256-8a2d-b6a27ccaf903\") " pod="openstack/barbican-8304-account-create-update-5bb5c" Dec 05 11:28:59 crc kubenswrapper[4728]: I1205 11:28:59.978441 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/604ad92d-004e-41d5-9467-f8df44cfd9b2-operator-scripts\") pod \"cinder-09b6-account-create-update-j5949\" (UID: \"604ad92d-004e-41d5-9467-f8df44cfd9b2\") " pod="openstack/cinder-09b6-account-create-update-j5949" Dec 05 11:28:59 crc kubenswrapper[4728]: I1205 11:28:59.978472 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c98ef602-49f4-4256-8a2d-b6a27ccaf903-operator-scripts\") pod \"barbican-8304-account-create-update-5bb5c\" (UID: \"c98ef602-49f4-4256-8a2d-b6a27ccaf903\") " pod="openstack/barbican-8304-account-create-update-5bb5c" Dec 05 11:28:59 crc kubenswrapper[4728]: I1205 11:28:59.978511 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m9dqf\" (UniqueName: \"kubernetes.io/projected/604ad92d-004e-41d5-9467-f8df44cfd9b2-kube-api-access-m9dqf\") pod \"cinder-09b6-account-create-update-j5949\" (UID: \"604ad92d-004e-41d5-9467-f8df44cfd9b2\") " pod="openstack/cinder-09b6-account-create-update-j5949" Dec 05 11:28:59 crc kubenswrapper[4728]: I1205 11:28:59.978717 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b96bd60f-bf85-4263-9e18-c367e742c780-operator-scripts\") pod \"barbican-db-create-lzfv7\" (UID: \"b96bd60f-bf85-4263-9e18-c367e742c780\") " pod="openstack/barbican-db-create-lzfv7" Dec 05 11:28:59 crc kubenswrapper[4728]: I1205 11:28:59.979503 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b96bd60f-bf85-4263-9e18-c367e742c780-operator-scripts\") pod \"barbican-db-create-lzfv7\" (UID: \"b96bd60f-bf85-4263-9e18-c367e742c780\") " pod="openstack/barbican-db-create-lzfv7" Dec 05 11:29:00 crc kubenswrapper[4728]: I1205 11:29:00.017421 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z85h6\" (UniqueName: \"kubernetes.io/projected/b96bd60f-bf85-4263-9e18-c367e742c780-kube-api-access-z85h6\") pod \"barbican-db-create-lzfv7\" (UID: \"b96bd60f-bf85-4263-9e18-c367e742c780\") " pod="openstack/barbican-db-create-lzfv7" Dec 05 11:29:00 crc kubenswrapper[4728]: I1205 11:29:00.018584 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-g59p8"] Dec 05 11:29:00 crc 
kubenswrapper[4728]: I1205 11:29:00.020139 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-g59p8" Dec 05 11:29:00 crc kubenswrapper[4728]: I1205 11:29:00.030963 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-g59p8"] Dec 05 11:29:00 crc kubenswrapper[4728]: I1205 11:29:00.070287 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-lzfv7" Dec 05 11:29:00 crc kubenswrapper[4728]: I1205 11:29:00.080455 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z9p96\" (UniqueName: \"kubernetes.io/projected/c98ef602-49f4-4256-8a2d-b6a27ccaf903-kube-api-access-z9p96\") pod \"barbican-8304-account-create-update-5bb5c\" (UID: \"c98ef602-49f4-4256-8a2d-b6a27ccaf903\") " pod="openstack/barbican-8304-account-create-update-5bb5c" Dec 05 11:29:00 crc kubenswrapper[4728]: I1205 11:29:00.080543 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/604ad92d-004e-41d5-9467-f8df44cfd9b2-operator-scripts\") pod \"cinder-09b6-account-create-update-j5949\" (UID: \"604ad92d-004e-41d5-9467-f8df44cfd9b2\") " pod="openstack/cinder-09b6-account-create-update-j5949" Dec 05 11:29:00 crc kubenswrapper[4728]: I1205 11:29:00.080565 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c98ef602-49f4-4256-8a2d-b6a27ccaf903-operator-scripts\") pod \"barbican-8304-account-create-update-5bb5c\" (UID: \"c98ef602-49f4-4256-8a2d-b6a27ccaf903\") " pod="openstack/barbican-8304-account-create-update-5bb5c" Dec 05 11:29:00 crc kubenswrapper[4728]: I1205 11:29:00.080596 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m9dqf\" (UniqueName: \"kubernetes.io/projected/604ad92d-004e-41d5-9467-f8df44cfd9b2-kube-api-access-m9dqf\") pod \"cinder-09b6-account-create-update-j5949\" (UID: \"604ad92d-004e-41d5-9467-f8df44cfd9b2\") " pod="openstack/cinder-09b6-account-create-update-j5949" Dec 05 11:29:00 crc kubenswrapper[4728]: I1205 11:29:00.081523 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/604ad92d-004e-41d5-9467-f8df44cfd9b2-operator-scripts\") pod \"cinder-09b6-account-create-update-j5949\" (UID: \"604ad92d-004e-41d5-9467-f8df44cfd9b2\") " pod="openstack/cinder-09b6-account-create-update-j5949" Dec 05 11:29:00 crc kubenswrapper[4728]: I1205 11:29:00.082565 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c98ef602-49f4-4256-8a2d-b6a27ccaf903-operator-scripts\") pod \"barbican-8304-account-create-update-5bb5c\" (UID: \"c98ef602-49f4-4256-8a2d-b6a27ccaf903\") " pod="openstack/barbican-8304-account-create-update-5bb5c" Dec 05 11:29:00 crc kubenswrapper[4728]: I1205 11:29:00.083517 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-ca6c-account-create-update-j9pql"] Dec 05 11:29:00 crc kubenswrapper[4728]: I1205 11:29:00.085387 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-ca6c-account-create-update-j9pql" Dec 05 11:29:00 crc kubenswrapper[4728]: I1205 11:29:00.092064 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Dec 05 11:29:00 crc kubenswrapper[4728]: I1205 11:29:00.096362 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-ca6c-account-create-update-j9pql"] Dec 05 11:29:00 crc kubenswrapper[4728]: I1205 11:29:00.111724 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m9dqf\" (UniqueName: \"kubernetes.io/projected/604ad92d-004e-41d5-9467-f8df44cfd9b2-kube-api-access-m9dqf\") pod \"cinder-09b6-account-create-update-j5949\" (UID: \"604ad92d-004e-41d5-9467-f8df44cfd9b2\") " pod="openstack/cinder-09b6-account-create-update-j5949" Dec 05 11:29:00 crc kubenswrapper[4728]: I1205 11:29:00.111929 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z9p96\" (UniqueName: \"kubernetes.io/projected/c98ef602-49f4-4256-8a2d-b6a27ccaf903-kube-api-access-z9p96\") pod \"barbican-8304-account-create-update-5bb5c\" (UID: \"c98ef602-49f4-4256-8a2d-b6a27ccaf903\") " pod="openstack/barbican-8304-account-create-update-5bb5c" Dec 05 11:29:00 crc kubenswrapper[4728]: I1205 11:29:00.181591 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vntvq\" (UniqueName: \"kubernetes.io/projected/09a3c745-2007-4f55-b706-24f148fc1805-kube-api-access-vntvq\") pod \"neutron-ca6c-account-create-update-j9pql\" (UID: \"09a3c745-2007-4f55-b706-24f148fc1805\") " pod="openstack/neutron-ca6c-account-create-update-j9pql" Dec 05 11:29:00 crc kubenswrapper[4728]: I1205 11:29:00.181876 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lj5m2\" (UniqueName: \"kubernetes.io/projected/1f53576d-4b2d-4269-bea7-c3deb0d3292d-kube-api-access-lj5m2\") pod \"neutron-db-create-g59p8\" (UID: \"1f53576d-4b2d-4269-bea7-c3deb0d3292d\") " pod="openstack/neutron-db-create-g59p8" Dec 05 11:29:00 crc kubenswrapper[4728]: I1205 11:29:00.181981 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/09a3c745-2007-4f55-b706-24f148fc1805-operator-scripts\") pod \"neutron-ca6c-account-create-update-j9pql\" (UID: \"09a3c745-2007-4f55-b706-24f148fc1805\") " pod="openstack/neutron-ca6c-account-create-update-j9pql" Dec 05 11:29:00 crc kubenswrapper[4728]: I1205 11:29:00.182026 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1f53576d-4b2d-4269-bea7-c3deb0d3292d-operator-scripts\") pod \"neutron-db-create-g59p8\" (UID: \"1f53576d-4b2d-4269-bea7-c3deb0d3292d\") " pod="openstack/neutron-db-create-g59p8" Dec 05 11:29:00 crc kubenswrapper[4728]: I1205 11:29:00.253697 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-09b6-account-create-update-j5949" Dec 05 11:29:00 crc kubenswrapper[4728]: I1205 11:29:00.264130 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-8304-account-create-update-5bb5c" Dec 05 11:29:00 crc kubenswrapper[4728]: I1205 11:29:00.286602 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vntvq\" (UniqueName: \"kubernetes.io/projected/09a3c745-2007-4f55-b706-24f148fc1805-kube-api-access-vntvq\") pod \"neutron-ca6c-account-create-update-j9pql\" (UID: \"09a3c745-2007-4f55-b706-24f148fc1805\") " pod="openstack/neutron-ca6c-account-create-update-j9pql" Dec 05 11:29:00 crc kubenswrapper[4728]: I1205 11:29:00.286647 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lj5m2\" (UniqueName: \"kubernetes.io/projected/1f53576d-4b2d-4269-bea7-c3deb0d3292d-kube-api-access-lj5m2\") pod \"neutron-db-create-g59p8\" (UID: \"1f53576d-4b2d-4269-bea7-c3deb0d3292d\") " pod="openstack/neutron-db-create-g59p8" Dec 05 11:29:00 crc kubenswrapper[4728]: I1205 11:29:00.286741 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/09a3c745-2007-4f55-b706-24f148fc1805-operator-scripts\") pod \"neutron-ca6c-account-create-update-j9pql\" (UID: \"09a3c745-2007-4f55-b706-24f148fc1805\") " pod="openstack/neutron-ca6c-account-create-update-j9pql" Dec 05 11:29:00 crc kubenswrapper[4728]: I1205 11:29:00.286829 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1f53576d-4b2d-4269-bea7-c3deb0d3292d-operator-scripts\") pod \"neutron-db-create-g59p8\" (UID: \"1f53576d-4b2d-4269-bea7-c3deb0d3292d\") " pod="openstack/neutron-db-create-g59p8" Dec 05 11:29:00 crc kubenswrapper[4728]: I1205 11:29:00.287494 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1f53576d-4b2d-4269-bea7-c3deb0d3292d-operator-scripts\") pod \"neutron-db-create-g59p8\" (UID: \"1f53576d-4b2d-4269-bea7-c3deb0d3292d\") " pod="openstack/neutron-db-create-g59p8" Dec 05 11:29:00 crc kubenswrapper[4728]: I1205 11:29:00.288324 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/09a3c745-2007-4f55-b706-24f148fc1805-operator-scripts\") pod \"neutron-ca6c-account-create-update-j9pql\" (UID: \"09a3c745-2007-4f55-b706-24f148fc1805\") " pod="openstack/neutron-ca6c-account-create-update-j9pql" Dec 05 11:29:00 crc kubenswrapper[4728]: I1205 11:29:00.313371 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lj5m2\" (UniqueName: \"kubernetes.io/projected/1f53576d-4b2d-4269-bea7-c3deb0d3292d-kube-api-access-lj5m2\") pod \"neutron-db-create-g59p8\" (UID: \"1f53576d-4b2d-4269-bea7-c3deb0d3292d\") " pod="openstack/neutron-db-create-g59p8" Dec 05 11:29:00 crc kubenswrapper[4728]: I1205 11:29:00.313974 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vntvq\" (UniqueName: \"kubernetes.io/projected/09a3c745-2007-4f55-b706-24f148fc1805-kube-api-access-vntvq\") pod \"neutron-ca6c-account-create-update-j9pql\" (UID: \"09a3c745-2007-4f55-b706-24f148fc1805\") " pod="openstack/neutron-ca6c-account-create-update-j9pql" Dec 05 11:29:00 crc kubenswrapper[4728]: I1205 11:29:00.388285 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-g59p8" Dec 05 11:29:00 crc kubenswrapper[4728]: I1205 11:29:00.425752 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-ca6c-account-create-update-j9pql" Dec 05 11:29:00 crc kubenswrapper[4728]: I1205 11:29:00.521290 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-214d-account-create-update-lpk9z"] Dec 05 11:29:00 crc kubenswrapper[4728]: I1205 11:29:00.550676 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-lzfv7"] Dec 05 11:29:00 crc kubenswrapper[4728]: I1205 11:29:00.572119 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-qgcwk"] Dec 05 11:29:00 crc kubenswrapper[4728]: I1205 11:29:00.592521 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a64af6ac-e922-435f-bee9-1cc7e7a95f4a","Type":"ContainerStarted","Data":"1c16f0f5ab2ef38f0337b01c2e48012848bea1477b2893e8d45cf3d91ab8b0c4"} Dec 05 11:29:00 crc kubenswrapper[4728]: I1205 11:29:00.592582 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a64af6ac-e922-435f-bee9-1cc7e7a95f4a","Type":"ContainerStarted","Data":"d8cd57da59c602950c4dbe71a340f9192a70920a1c41cd807404481a6d44986b"} Dec 05 11:29:00 crc kubenswrapper[4728]: I1205 11:29:00.592595 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a64af6ac-e922-435f-bee9-1cc7e7a95f4a","Type":"ContainerStarted","Data":"6d3f8538c4ea2f6eaf4f2735d06862e74a7b2590266598abb10c6bc465c7902e"} Dec 05 11:29:00 crc kubenswrapper[4728]: W1205 11:29:00.597555 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod23d1273f_c19e_48d7_b792_0a6db00bc94d.slice/crio-f0762e7b50a9b5b145d9df3a035b942ac20714fec5c8dd9228b3a285fe1a6fa9 WatchSource:0}: Error finding container f0762e7b50a9b5b145d9df3a035b942ac20714fec5c8dd9228b3a285fe1a6fa9: Status 404 returned error can't find the container with id f0762e7b50a9b5b145d9df3a035b942ac20714fec5c8dd9228b3a285fe1a6fa9 Dec 05 11:29:00 crc kubenswrapper[4728]: I1205 11:29:00.601109 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-lzfv7" event={"ID":"b96bd60f-bf85-4263-9e18-c367e742c780","Type":"ContainerStarted","Data":"3dc11a987bd358e059e1dbceb6d39b239e3f1710f5051c19a1629f7f60f0b61b"} Dec 05 11:29:00 crc kubenswrapper[4728]: I1205 11:29:00.604389 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-214d-account-create-update-lpk9z" event={"ID":"72c64530-79c6-4b13-a18f-70c0c1405d8f","Type":"ContainerStarted","Data":"87df3882800f796b4e9ae4dabeb09b80b9c58b59454ca5abd4862219c065305b"} Dec 05 11:29:00 crc kubenswrapper[4728]: I1205 11:29:00.614782 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-create-cblx8"] Dec 05 11:29:00 crc kubenswrapper[4728]: I1205 11:29:00.763618 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-09b6-account-create-update-j5949"] Dec 05 11:29:00 crc kubenswrapper[4728]: I1205 11:29:00.819477 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-8304-account-create-update-5bb5c"] Dec 05 11:29:00 crc kubenswrapper[4728]: W1205 11:29:00.831487 4728 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc98ef602_49f4_4256_8a2d_b6a27ccaf903.slice/crio-7011e9c8bd6c7e43efcfbb0e558af829caba56c894d5dbefd167b68c11964104 WatchSource:0}: Error finding container 7011e9c8bd6c7e43efcfbb0e558af829caba56c894d5dbefd167b68c11964104: Status 404 returned error can't find the container with id 7011e9c8bd6c7e43efcfbb0e558af829caba56c894d5dbefd167b68c11964104 Dec 05 11:29:00 crc kubenswrapper[4728]: I1205 11:29:00.861689 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-dmlfm"] Dec 05 11:29:00 crc kubenswrapper[4728]: I1205 11:29:00.862844 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-dmlfm" Dec 05 11:29:00 crc kubenswrapper[4728]: I1205 11:29:00.864553 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 05 11:29:00 crc kubenswrapper[4728]: I1205 11:29:00.864942 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-rz6qv" Dec 05 11:29:00 crc kubenswrapper[4728]: I1205 11:29:00.865051 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 05 11:29:00 crc kubenswrapper[4728]: I1205 11:29:00.865086 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 05 11:29:00 crc kubenswrapper[4728]: I1205 11:29:00.868811 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-dmlfm"] Dec 05 11:29:00 crc kubenswrapper[4728]: I1205 11:29:00.999156 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/540467c2-2e5b-489d-ab98-42b9d580136b-combined-ca-bundle\") pod \"keystone-db-sync-dmlfm\" (UID: \"540467c2-2e5b-489d-ab98-42b9d580136b\") " pod="openstack/keystone-db-sync-dmlfm" Dec 05 11:29:00 crc kubenswrapper[4728]: I1205 11:29:00.999219 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/540467c2-2e5b-489d-ab98-42b9d580136b-config-data\") pod \"keystone-db-sync-dmlfm\" (UID: \"540467c2-2e5b-489d-ab98-42b9d580136b\") " pod="openstack/keystone-db-sync-dmlfm" Dec 05 11:29:00 crc kubenswrapper[4728]: I1205 11:29:00.999257 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nwnps\" (UniqueName: \"kubernetes.io/projected/540467c2-2e5b-489d-ab98-42b9d580136b-kube-api-access-nwnps\") pod \"keystone-db-sync-dmlfm\" (UID: \"540467c2-2e5b-489d-ab98-42b9d580136b\") " pod="openstack/keystone-db-sync-dmlfm" Dec 05 11:29:01 crc kubenswrapper[4728]: I1205 11:29:01.079441 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-ca6c-account-create-update-j9pql"] Dec 05 11:29:01 crc kubenswrapper[4728]: W1205 11:29:01.086256 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1f53576d_4b2d_4269_bea7_c3deb0d3292d.slice/crio-527d01d70e2d6e45328672ebef6e0d7a869b422a579a247e5712788ffd5c07ba WatchSource:0}: Error finding container 527d01d70e2d6e45328672ebef6e0d7a869b422a579a247e5712788ffd5c07ba: Status 404 returned error can't find the container with id 527d01d70e2d6e45328672ebef6e0d7a869b422a579a247e5712788ffd5c07ba Dec 05 11:29:01 crc kubenswrapper[4728]: I1205 11:29:01.091962 4728 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-g59p8"] Dec 05 11:29:01 crc kubenswrapper[4728]: W1205 11:29:01.094084 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod09a3c745_2007_4f55_b706_24f148fc1805.slice/crio-96cf79006d75c20ec555fc7cd896b7290e9ea36e87062310ad9eee526a9bb769 WatchSource:0}: Error finding container 96cf79006d75c20ec555fc7cd896b7290e9ea36e87062310ad9eee526a9bb769: Status 404 returned error can't find the container with id 96cf79006d75c20ec555fc7cd896b7290e9ea36e87062310ad9eee526a9bb769 Dec 05 11:29:01 crc kubenswrapper[4728]: I1205 11:29:01.100526 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/540467c2-2e5b-489d-ab98-42b9d580136b-combined-ca-bundle\") pod \"keystone-db-sync-dmlfm\" (UID: \"540467c2-2e5b-489d-ab98-42b9d580136b\") " pod="openstack/keystone-db-sync-dmlfm" Dec 05 11:29:01 crc kubenswrapper[4728]: I1205 11:29:01.100581 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/540467c2-2e5b-489d-ab98-42b9d580136b-config-data\") pod \"keystone-db-sync-dmlfm\" (UID: \"540467c2-2e5b-489d-ab98-42b9d580136b\") " pod="openstack/keystone-db-sync-dmlfm" Dec 05 11:29:01 crc kubenswrapper[4728]: I1205 11:29:01.100617 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nwnps\" (UniqueName: \"kubernetes.io/projected/540467c2-2e5b-489d-ab98-42b9d580136b-kube-api-access-nwnps\") pod \"keystone-db-sync-dmlfm\" (UID: \"540467c2-2e5b-489d-ab98-42b9d580136b\") " pod="openstack/keystone-db-sync-dmlfm" Dec 05 11:29:01 crc kubenswrapper[4728]: I1205 11:29:01.110940 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/540467c2-2e5b-489d-ab98-42b9d580136b-combined-ca-bundle\") pod \"keystone-db-sync-dmlfm\" (UID: \"540467c2-2e5b-489d-ab98-42b9d580136b\") " pod="openstack/keystone-db-sync-dmlfm" Dec 05 11:29:01 crc kubenswrapper[4728]: I1205 11:29:01.111073 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/540467c2-2e5b-489d-ab98-42b9d580136b-config-data\") pod \"keystone-db-sync-dmlfm\" (UID: \"540467c2-2e5b-489d-ab98-42b9d580136b\") " pod="openstack/keystone-db-sync-dmlfm" Dec 05 11:29:01 crc kubenswrapper[4728]: I1205 11:29:01.122833 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nwnps\" (UniqueName: \"kubernetes.io/projected/540467c2-2e5b-489d-ab98-42b9d580136b-kube-api-access-nwnps\") pod \"keystone-db-sync-dmlfm\" (UID: \"540467c2-2e5b-489d-ab98-42b9d580136b\") " pod="openstack/keystone-db-sync-dmlfm" Dec 05 11:29:01 crc kubenswrapper[4728]: I1205 11:29:01.193346 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-dmlfm" Dec 05 11:29:01 crc kubenswrapper[4728]: I1205 11:29:01.614951 4728 generic.go:334] "Generic (PLEG): container finished" podID="b96bd60f-bf85-4263-9e18-c367e742c780" containerID="1f2745aa7ff2d682ac614a2a12900c2d5859b7f9b0bae3fed435925e1ee28544" exitCode=0 Dec 05 11:29:01 crc kubenswrapper[4728]: I1205 11:29:01.614995 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-lzfv7" event={"ID":"b96bd60f-bf85-4263-9e18-c367e742c780","Type":"ContainerDied","Data":"1f2745aa7ff2d682ac614a2a12900c2d5859b7f9b0bae3fed435925e1ee28544"} Dec 05 11:29:01 crc kubenswrapper[4728]: I1205 11:29:01.617356 4728 generic.go:334] "Generic (PLEG): container finished" podID="c98ef602-49f4-4256-8a2d-b6a27ccaf903" containerID="fa8cd64a144b422c33122e9305d8132c9a166c3b62525c64f2f2645ea805ef2d" exitCode=0 Dec 05 11:29:01 crc kubenswrapper[4728]: I1205 11:29:01.617424 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-8304-account-create-update-5bb5c" event={"ID":"c98ef602-49f4-4256-8a2d-b6a27ccaf903","Type":"ContainerDied","Data":"fa8cd64a144b422c33122e9305d8132c9a166c3b62525c64f2f2645ea805ef2d"} Dec 05 11:29:01 crc kubenswrapper[4728]: I1205 11:29:01.617453 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-8304-account-create-update-5bb5c" event={"ID":"c98ef602-49f4-4256-8a2d-b6a27ccaf903","Type":"ContainerStarted","Data":"7011e9c8bd6c7e43efcfbb0e558af829caba56c894d5dbefd167b68c11964104"} Dec 05 11:29:01 crc kubenswrapper[4728]: I1205 11:29:01.619055 4728 generic.go:334] "Generic (PLEG): container finished" podID="72c64530-79c6-4b13-a18f-70c0c1405d8f" containerID="148d543b70dcd94ea80e1f5af6e95f88aba4885de7f3365130208afbd2e75992" exitCode=0 Dec 05 11:29:01 crc kubenswrapper[4728]: I1205 11:29:01.619137 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-214d-account-create-update-lpk9z" event={"ID":"72c64530-79c6-4b13-a18f-70c0c1405d8f","Type":"ContainerDied","Data":"148d543b70dcd94ea80e1f5af6e95f88aba4885de7f3365130208afbd2e75992"} Dec 05 11:29:01 crc kubenswrapper[4728]: I1205 11:29:01.624735 4728 generic.go:334] "Generic (PLEG): container finished" podID="1f53576d-4b2d-4269-bea7-c3deb0d3292d" containerID="3544b9cfba4163ae7f828a3a06e45900a218e576a528e112093eb7d4515cb940" exitCode=0 Dec 05 11:29:01 crc kubenswrapper[4728]: I1205 11:29:01.624767 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-g59p8" event={"ID":"1f53576d-4b2d-4269-bea7-c3deb0d3292d","Type":"ContainerDied","Data":"3544b9cfba4163ae7f828a3a06e45900a218e576a528e112093eb7d4515cb940"} Dec 05 11:29:01 crc kubenswrapper[4728]: I1205 11:29:01.624856 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-g59p8" event={"ID":"1f53576d-4b2d-4269-bea7-c3deb0d3292d","Type":"ContainerStarted","Data":"527d01d70e2d6e45328672ebef6e0d7a869b422a579a247e5712788ffd5c07ba"} Dec 05 11:29:01 crc kubenswrapper[4728]: I1205 11:29:01.629415 4728 generic.go:334] "Generic (PLEG): container finished" podID="604ad92d-004e-41d5-9467-f8df44cfd9b2" containerID="795e8c6a5bd2c0182ab334359b95fb0893f4aa67fcc22a98c49d2116d6e627a7" exitCode=0 Dec 05 11:29:01 crc kubenswrapper[4728]: I1205 11:29:01.629486 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-09b6-account-create-update-j5949" 
event={"ID":"604ad92d-004e-41d5-9467-f8df44cfd9b2","Type":"ContainerDied","Data":"795e8c6a5bd2c0182ab334359b95fb0893f4aa67fcc22a98c49d2116d6e627a7"} Dec 05 11:29:01 crc kubenswrapper[4728]: I1205 11:29:01.629515 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-09b6-account-create-update-j5949" event={"ID":"604ad92d-004e-41d5-9467-f8df44cfd9b2","Type":"ContainerStarted","Data":"f783d6678bbe2d9ceaa45bbeb726069ceeb7185b6f3f962b32f49cdfa7487aee"} Dec 05 11:29:01 crc kubenswrapper[4728]: I1205 11:29:01.631062 4728 generic.go:334] "Generic (PLEG): container finished" podID="23d1273f-c19e-48d7-b792-0a6db00bc94d" containerID="ecf8bdf78ab4f92089659d22261096826d89c943e519d73418dc0d27a92ac7b6" exitCode=0 Dec 05 11:29:01 crc kubenswrapper[4728]: I1205 11:29:01.631113 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-qgcwk" event={"ID":"23d1273f-c19e-48d7-b792-0a6db00bc94d","Type":"ContainerDied","Data":"ecf8bdf78ab4f92089659d22261096826d89c943e519d73418dc0d27a92ac7b6"} Dec 05 11:29:01 crc kubenswrapper[4728]: I1205 11:29:01.631140 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-qgcwk" event={"ID":"23d1273f-c19e-48d7-b792-0a6db00bc94d","Type":"ContainerStarted","Data":"f0762e7b50a9b5b145d9df3a035b942ac20714fec5c8dd9228b3a285fe1a6fa9"} Dec 05 11:29:01 crc kubenswrapper[4728]: I1205 11:29:01.632231 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-ca6c-account-create-update-j9pql" event={"ID":"09a3c745-2007-4f55-b706-24f148fc1805","Type":"ContainerStarted","Data":"b6f2fed6267b0d4efda811159d9e88c10a18e062aded7b1cb2891abeba8f252f"} Dec 05 11:29:01 crc kubenswrapper[4728]: I1205 11:29:01.632258 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-ca6c-account-create-update-j9pql" event={"ID":"09a3c745-2007-4f55-b706-24f148fc1805","Type":"ContainerStarted","Data":"96cf79006d75c20ec555fc7cd896b7290e9ea36e87062310ad9eee526a9bb769"} Dec 05 11:29:01 crc kubenswrapper[4728]: I1205 11:29:01.633347 4728 generic.go:334] "Generic (PLEG): container finished" podID="1077e45f-8397-4377-a4e4-6dea1e8f16cb" containerID="838d8f1f4dc964f976e1fc7ee93b6d68917231e4e59f7bd1b57e691a7d301e82" exitCode=0 Dec 05 11:29:01 crc kubenswrapper[4728]: I1205 11:29:01.633380 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-cblx8" event={"ID":"1077e45f-8397-4377-a4e4-6dea1e8f16cb","Type":"ContainerDied","Data":"838d8f1f4dc964f976e1fc7ee93b6d68917231e4e59f7bd1b57e691a7d301e82"} Dec 05 11:29:01 crc kubenswrapper[4728]: I1205 11:29:01.633399 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-cblx8" event={"ID":"1077e45f-8397-4377-a4e4-6dea1e8f16cb","Type":"ContainerStarted","Data":"def170020117b19c25ed9301c1b34c85153194efb427ac9c504d5fb22c83bac2"} Dec 05 11:29:01 crc kubenswrapper[4728]: I1205 11:29:01.734457 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-ca6c-account-create-update-j9pql" podStartSLOduration=1.7344389709999999 podStartE2EDuration="1.734438971s" podCreationTimestamp="2025-12-05 11:29:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:29:01.730224349 +0000 UTC m=+1275.872347062" watchObservedRunningTime="2025-12-05 11:29:01.734438971 +0000 UTC m=+1275.876561664" Dec 05 11:29:01 crc kubenswrapper[4728]: I1205 11:29:01.751543 4728 kubelet.go:2428] "SyncLoop 
UPDATE" source="api" pods=["openstack/keystone-db-sync-dmlfm"] Dec 05 11:29:01 crc kubenswrapper[4728]: W1205 11:29:01.961258 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod540467c2_2e5b_489d_ab98_42b9d580136b.slice/crio-d243da4a1c7789664688db2b51d5122ca024f7376f085731dee6951fcf1885bf WatchSource:0}: Error finding container d243da4a1c7789664688db2b51d5122ca024f7376f085731dee6951fcf1885bf: Status 404 returned error can't find the container with id d243da4a1c7789664688db2b51d5122ca024f7376f085731dee6951fcf1885bf Dec 05 11:29:02 crc kubenswrapper[4728]: I1205 11:29:02.642323 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-dmlfm" event={"ID":"540467c2-2e5b-489d-ab98-42b9d580136b","Type":"ContainerStarted","Data":"d243da4a1c7789664688db2b51d5122ca024f7376f085731dee6951fcf1885bf"} Dec 05 11:29:02 crc kubenswrapper[4728]: I1205 11:29:02.647651 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a64af6ac-e922-435f-bee9-1cc7e7a95f4a","Type":"ContainerStarted","Data":"5fa435144863e6748dc3e3cdfe00711b9b1f335d34d3243b0cec0eeb034bcd4c"} Dec 05 11:29:02 crc kubenswrapper[4728]: I1205 11:29:02.649623 4728 generic.go:334] "Generic (PLEG): container finished" podID="09a3c745-2007-4f55-b706-24f148fc1805" containerID="b6f2fed6267b0d4efda811159d9e88c10a18e062aded7b1cb2891abeba8f252f" exitCode=0 Dec 05 11:29:02 crc kubenswrapper[4728]: I1205 11:29:02.650275 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-ca6c-account-create-update-j9pql" event={"ID":"09a3c745-2007-4f55-b706-24f148fc1805","Type":"ContainerDied","Data":"b6f2fed6267b0d4efda811159d9e88c10a18e062aded7b1cb2891abeba8f252f"} Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.098325 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-qgcwk" Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.247322 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/23d1273f-c19e-48d7-b792-0a6db00bc94d-operator-scripts\") pod \"23d1273f-c19e-48d7-b792-0a6db00bc94d\" (UID: \"23d1273f-c19e-48d7-b792-0a6db00bc94d\") " Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.247629 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h4m8b\" (UniqueName: \"kubernetes.io/projected/23d1273f-c19e-48d7-b792-0a6db00bc94d-kube-api-access-h4m8b\") pod \"23d1273f-c19e-48d7-b792-0a6db00bc94d\" (UID: \"23d1273f-c19e-48d7-b792-0a6db00bc94d\") " Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.250604 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/23d1273f-c19e-48d7-b792-0a6db00bc94d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "23d1273f-c19e-48d7-b792-0a6db00bc94d" (UID: "23d1273f-c19e-48d7-b792-0a6db00bc94d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.254311 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23d1273f-c19e-48d7-b792-0a6db00bc94d-kube-api-access-h4m8b" (OuterVolumeSpecName: "kube-api-access-h4m8b") pod "23d1273f-c19e-48d7-b792-0a6db00bc94d" (UID: "23d1273f-c19e-48d7-b792-0a6db00bc94d"). 
InnerVolumeSpecName "kube-api-access-h4m8b". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.279264 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-214d-account-create-update-lpk9z" Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.310148 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-lzfv7" Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.315258 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-cblx8" Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.338203 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-g59p8" Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.353395 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/72c64530-79c6-4b13-a18f-70c0c1405d8f-operator-scripts\") pod \"72c64530-79c6-4b13-a18f-70c0c1405d8f\" (UID: \"72c64530-79c6-4b13-a18f-70c0c1405d8f\") " Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.353622 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-npqqd\" (UniqueName: \"kubernetes.io/projected/72c64530-79c6-4b13-a18f-70c0c1405d8f-kube-api-access-npqqd\") pod \"72c64530-79c6-4b13-a18f-70c0c1405d8f\" (UID: \"72c64530-79c6-4b13-a18f-70c0c1405d8f\") " Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.358161 4728 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/23d1273f-c19e-48d7-b792-0a6db00bc94d-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.358200 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h4m8b\" (UniqueName: \"kubernetes.io/projected/23d1273f-c19e-48d7-b792-0a6db00bc94d-kube-api-access-h4m8b\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.358994 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72c64530-79c6-4b13-a18f-70c0c1405d8f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "72c64530-79c6-4b13-a18f-70c0c1405d8f" (UID: "72c64530-79c6-4b13-a18f-70c0c1405d8f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.361110 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72c64530-79c6-4b13-a18f-70c0c1405d8f-kube-api-access-npqqd" (OuterVolumeSpecName: "kube-api-access-npqqd") pod "72c64530-79c6-4b13-a18f-70c0c1405d8f" (UID: "72c64530-79c6-4b13-a18f-70c0c1405d8f"). InnerVolumeSpecName "kube-api-access-npqqd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.369459 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-09b6-account-create-update-j5949" Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.370280 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-8304-account-create-update-5bb5c" Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.463600 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1f53576d-4b2d-4269-bea7-c3deb0d3292d-operator-scripts\") pod \"1f53576d-4b2d-4269-bea7-c3deb0d3292d\" (UID: \"1f53576d-4b2d-4269-bea7-c3deb0d3292d\") " Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.463663 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m9dqf\" (UniqueName: \"kubernetes.io/projected/604ad92d-004e-41d5-9467-f8df44cfd9b2-kube-api-access-m9dqf\") pod \"604ad92d-004e-41d5-9467-f8df44cfd9b2\" (UID: \"604ad92d-004e-41d5-9467-f8df44cfd9b2\") " Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.463694 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/604ad92d-004e-41d5-9467-f8df44cfd9b2-operator-scripts\") pod \"604ad92d-004e-41d5-9467-f8df44cfd9b2\" (UID: \"604ad92d-004e-41d5-9467-f8df44cfd9b2\") " Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.463711 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b96bd60f-bf85-4263-9e18-c367e742c780-operator-scripts\") pod \"b96bd60f-bf85-4263-9e18-c367e742c780\" (UID: \"b96bd60f-bf85-4263-9e18-c367e742c780\") " Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.463737 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z85h6\" (UniqueName: \"kubernetes.io/projected/b96bd60f-bf85-4263-9e18-c367e742c780-kube-api-access-z85h6\") pod \"b96bd60f-bf85-4263-9e18-c367e742c780\" (UID: \"b96bd60f-bf85-4263-9e18-c367e742c780\") " Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.463764 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z9p96\" (UniqueName: \"kubernetes.io/projected/c98ef602-49f4-4256-8a2d-b6a27ccaf903-kube-api-access-z9p96\") pod \"c98ef602-49f4-4256-8a2d-b6a27ccaf903\" (UID: \"c98ef602-49f4-4256-8a2d-b6a27ccaf903\") " Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.463781 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c98ef602-49f4-4256-8a2d-b6a27ccaf903-operator-scripts\") pod \"c98ef602-49f4-4256-8a2d-b6a27ccaf903\" (UID: \"c98ef602-49f4-4256-8a2d-b6a27ccaf903\") " Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.463840 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1077e45f-8397-4377-a4e4-6dea1e8f16cb-operator-scripts\") pod \"1077e45f-8397-4377-a4e4-6dea1e8f16cb\" (UID: \"1077e45f-8397-4377-a4e4-6dea1e8f16cb\") " Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.463861 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4kgj4\" (UniqueName: \"kubernetes.io/projected/1077e45f-8397-4377-a4e4-6dea1e8f16cb-kube-api-access-4kgj4\") pod \"1077e45f-8397-4377-a4e4-6dea1e8f16cb\" (UID: \"1077e45f-8397-4377-a4e4-6dea1e8f16cb\") " Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.463882 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lj5m2\" (UniqueName: 
\"kubernetes.io/projected/1f53576d-4b2d-4269-bea7-c3deb0d3292d-kube-api-access-lj5m2\") pod \"1f53576d-4b2d-4269-bea7-c3deb0d3292d\" (UID: \"1f53576d-4b2d-4269-bea7-c3deb0d3292d\") " Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.464376 4728 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/72c64530-79c6-4b13-a18f-70c0c1405d8f-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.464387 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-npqqd\" (UniqueName: \"kubernetes.io/projected/72c64530-79c6-4b13-a18f-70c0c1405d8f-kube-api-access-npqqd\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.465125 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c98ef602-49f4-4256-8a2d-b6a27ccaf903-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c98ef602-49f4-4256-8a2d-b6a27ccaf903" (UID: "c98ef602-49f4-4256-8a2d-b6a27ccaf903"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.467269 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1077e45f-8397-4377-a4e4-6dea1e8f16cb-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1077e45f-8397-4377-a4e4-6dea1e8f16cb" (UID: "1077e45f-8397-4377-a4e4-6dea1e8f16cb"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.468114 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1f53576d-4b2d-4269-bea7-c3deb0d3292d-kube-api-access-lj5m2" (OuterVolumeSpecName: "kube-api-access-lj5m2") pod "1f53576d-4b2d-4269-bea7-c3deb0d3292d" (UID: "1f53576d-4b2d-4269-bea7-c3deb0d3292d"). InnerVolumeSpecName "kube-api-access-lj5m2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.468754 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/604ad92d-004e-41d5-9467-f8df44cfd9b2-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "604ad92d-004e-41d5-9467-f8df44cfd9b2" (UID: "604ad92d-004e-41d5-9467-f8df44cfd9b2"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.469097 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1f53576d-4b2d-4269-bea7-c3deb0d3292d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1f53576d-4b2d-4269-bea7-c3deb0d3292d" (UID: "1f53576d-4b2d-4269-bea7-c3deb0d3292d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.469260 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b96bd60f-bf85-4263-9e18-c367e742c780-kube-api-access-z85h6" (OuterVolumeSpecName: "kube-api-access-z85h6") pod "b96bd60f-bf85-4263-9e18-c367e742c780" (UID: "b96bd60f-bf85-4263-9e18-c367e742c780"). InnerVolumeSpecName "kube-api-access-z85h6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.469426 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b96bd60f-bf85-4263-9e18-c367e742c780-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b96bd60f-bf85-4263-9e18-c367e742c780" (UID: "b96bd60f-bf85-4263-9e18-c367e742c780"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.471267 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c98ef602-49f4-4256-8a2d-b6a27ccaf903-kube-api-access-z9p96" (OuterVolumeSpecName: "kube-api-access-z9p96") pod "c98ef602-49f4-4256-8a2d-b6a27ccaf903" (UID: "c98ef602-49f4-4256-8a2d-b6a27ccaf903"). InnerVolumeSpecName "kube-api-access-z9p96". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.475289 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/604ad92d-004e-41d5-9467-f8df44cfd9b2-kube-api-access-m9dqf" (OuterVolumeSpecName: "kube-api-access-m9dqf") pod "604ad92d-004e-41d5-9467-f8df44cfd9b2" (UID: "604ad92d-004e-41d5-9467-f8df44cfd9b2"). InnerVolumeSpecName "kube-api-access-m9dqf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.476633 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1077e45f-8397-4377-a4e4-6dea1e8f16cb-kube-api-access-4kgj4" (OuterVolumeSpecName: "kube-api-access-4kgj4") pod "1077e45f-8397-4377-a4e4-6dea1e8f16cb" (UID: "1077e45f-8397-4377-a4e4-6dea1e8f16cb"). InnerVolumeSpecName "kube-api-access-4kgj4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.566592 4728 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1f53576d-4b2d-4269-bea7-c3deb0d3292d-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.566642 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m9dqf\" (UniqueName: \"kubernetes.io/projected/604ad92d-004e-41d5-9467-f8df44cfd9b2-kube-api-access-m9dqf\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.566655 4728 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/604ad92d-004e-41d5-9467-f8df44cfd9b2-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.566665 4728 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b96bd60f-bf85-4263-9e18-c367e742c780-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.566688 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z85h6\" (UniqueName: \"kubernetes.io/projected/b96bd60f-bf85-4263-9e18-c367e742c780-kube-api-access-z85h6\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.566698 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z9p96\" (UniqueName: \"kubernetes.io/projected/c98ef602-49f4-4256-8a2d-b6a27ccaf903-kube-api-access-z9p96\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.566706 4728 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c98ef602-49f4-4256-8a2d-b6a27ccaf903-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.566713 4728 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1077e45f-8397-4377-a4e4-6dea1e8f16cb-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.566721 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4kgj4\" (UniqueName: \"kubernetes.io/projected/1077e45f-8397-4377-a4e4-6dea1e8f16cb-kube-api-access-4kgj4\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.566729 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lj5m2\" (UniqueName: \"kubernetes.io/projected/1f53576d-4b2d-4269-bea7-c3deb0d3292d-kube-api-access-lj5m2\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.658271 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-lzfv7" event={"ID":"b96bd60f-bf85-4263-9e18-c367e742c780","Type":"ContainerDied","Data":"3dc11a987bd358e059e1dbceb6d39b239e3f1710f5051c19a1629f7f60f0b61b"} Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.658313 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3dc11a987bd358e059e1dbceb6d39b239e3f1710f5051c19a1629f7f60f0b61b" Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.658369 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-lzfv7" Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.661096 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-214d-account-create-update-lpk9z" event={"ID":"72c64530-79c6-4b13-a18f-70c0c1405d8f","Type":"ContainerDied","Data":"87df3882800f796b4e9ae4dabeb09b80b9c58b59454ca5abd4862219c065305b"} Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.661136 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="87df3882800f796b4e9ae4dabeb09b80b9c58b59454ca5abd4862219c065305b" Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.661224 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-214d-account-create-update-lpk9z" Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.665905 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-g59p8" event={"ID":"1f53576d-4b2d-4269-bea7-c3deb0d3292d","Type":"ContainerDied","Data":"527d01d70e2d6e45328672ebef6e0d7a869b422a579a247e5712788ffd5c07ba"} Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.665942 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="527d01d70e2d6e45328672ebef6e0d7a869b422a579a247e5712788ffd5c07ba" Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.665975 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-g59p8" Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.673128 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a64af6ac-e922-435f-bee9-1cc7e7a95f4a","Type":"ContainerStarted","Data":"baa1b0bcc8562bd518c4225c8005276bf24d7fec7da6513e786c58ddca648e5a"} Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.673169 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a64af6ac-e922-435f-bee9-1cc7e7a95f4a","Type":"ContainerStarted","Data":"b8789ad01a1e6827d8fa1a4f159019dd8476870b8f1125b69ee931b5dcc17a70"} Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.673178 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a64af6ac-e922-435f-bee9-1cc7e7a95f4a","Type":"ContainerStarted","Data":"5c288c8a727a773de3f09efe5425a6b8bdb5ca3f726b4de3d4af7c9262460c75"} Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.674536 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-qgcwk" event={"ID":"23d1273f-c19e-48d7-b792-0a6db00bc94d","Type":"ContainerDied","Data":"f0762e7b50a9b5b145d9df3a035b942ac20714fec5c8dd9228b3a285fe1a6fa9"} Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.674566 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f0762e7b50a9b5b145d9df3a035b942ac20714fec5c8dd9228b3a285fe1a6fa9" Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.674631 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-qgcwk" Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.676938 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-cblx8" event={"ID":"1077e45f-8397-4377-a4e4-6dea1e8f16cb","Type":"ContainerDied","Data":"def170020117b19c25ed9301c1b34c85153194efb427ac9c504d5fb22c83bac2"} Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.676970 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="def170020117b19c25ed9301c1b34c85153194efb427ac9c504d5fb22c83bac2" Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.677036 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-cblx8" Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.683311 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-8304-account-create-update-5bb5c" event={"ID":"c98ef602-49f4-4256-8a2d-b6a27ccaf903","Type":"ContainerDied","Data":"7011e9c8bd6c7e43efcfbb0e558af829caba56c894d5dbefd167b68c11964104"} Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.683777 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7011e9c8bd6c7e43efcfbb0e558af829caba56c894d5dbefd167b68c11964104" Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.683350 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-8304-account-create-update-5bb5c" Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.685880 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-09b6-account-create-update-j5949" event={"ID":"604ad92d-004e-41d5-9467-f8df44cfd9b2","Type":"ContainerDied","Data":"f783d6678bbe2d9ceaa45bbeb726069ceeb7185b6f3f962b32f49cdfa7487aee"} Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.685910 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f783d6678bbe2d9ceaa45bbeb726069ceeb7185b6f3f962b32f49cdfa7487aee" Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.685984 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-09b6-account-create-update-j5949" Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.689927 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-8fpw9" event={"ID":"30ec6fee-e0fe-471a-a673-7856319a8dd8","Type":"ContainerStarted","Data":"03a65b5ee9a21cbc1b78c58eaf7bd51a3afbf8d82414e0138044b5e4a9c34a49"} Dec 05 11:29:03 crc kubenswrapper[4728]: I1205 11:29:03.708832 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-8fpw9" podStartSLOduration=3.290840223 podStartE2EDuration="41.708781556s" podCreationTimestamp="2025-12-05 11:28:22 +0000 UTC" firstStartedPulling="2025-12-05 11:28:23.622676728 +0000 UTC m=+1237.764799421" lastFinishedPulling="2025-12-05 11:29:02.040618061 +0000 UTC m=+1276.182740754" observedRunningTime="2025-12-05 11:29:03.707615585 +0000 UTC m=+1277.849738288" watchObservedRunningTime="2025-12-05 11:29:03.708781556 +0000 UTC m=+1277.850904249" Dec 05 11:29:04 crc kubenswrapper[4728]: I1205 11:29:04.022042 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-ca6c-account-create-update-j9pql" Dec 05 11:29:04 crc kubenswrapper[4728]: I1205 11:29:04.175493 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vntvq\" (UniqueName: \"kubernetes.io/projected/09a3c745-2007-4f55-b706-24f148fc1805-kube-api-access-vntvq\") pod \"09a3c745-2007-4f55-b706-24f148fc1805\" (UID: \"09a3c745-2007-4f55-b706-24f148fc1805\") " Dec 05 11:29:04 crc kubenswrapper[4728]: I1205 11:29:04.175572 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/09a3c745-2007-4f55-b706-24f148fc1805-operator-scripts\") pod \"09a3c745-2007-4f55-b706-24f148fc1805\" (UID: \"09a3c745-2007-4f55-b706-24f148fc1805\") " Dec 05 11:29:04 crc kubenswrapper[4728]: I1205 11:29:04.176203 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09a3c745-2007-4f55-b706-24f148fc1805-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "09a3c745-2007-4f55-b706-24f148fc1805" (UID: "09a3c745-2007-4f55-b706-24f148fc1805"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:29:04 crc kubenswrapper[4728]: I1205 11:29:04.179339 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09a3c745-2007-4f55-b706-24f148fc1805-kube-api-access-vntvq" (OuterVolumeSpecName: "kube-api-access-vntvq") pod "09a3c745-2007-4f55-b706-24f148fc1805" (UID: "09a3c745-2007-4f55-b706-24f148fc1805"). InnerVolumeSpecName "kube-api-access-vntvq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:29:04 crc kubenswrapper[4728]: I1205 11:29:04.277966 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vntvq\" (UniqueName: \"kubernetes.io/projected/09a3c745-2007-4f55-b706-24f148fc1805-kube-api-access-vntvq\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:04 crc kubenswrapper[4728]: I1205 11:29:04.278026 4728 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/09a3c745-2007-4f55-b706-24f148fc1805-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:04 crc kubenswrapper[4728]: E1205 11:29:04.458701 4728 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod09a3c745_2007_4f55_b706_24f148fc1805.slice\": RecentStats: unable to find data in memory cache]" Dec 05 11:29:04 crc kubenswrapper[4728]: I1205 11:29:04.697125 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-ca6c-account-create-update-j9pql" event={"ID":"09a3c745-2007-4f55-b706-24f148fc1805","Type":"ContainerDied","Data":"96cf79006d75c20ec555fc7cd896b7290e9ea36e87062310ad9eee526a9bb769"} Dec 05 11:29:04 crc kubenswrapper[4728]: I1205 11:29:04.697159 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="96cf79006d75c20ec555fc7cd896b7290e9ea36e87062310ad9eee526a9bb769" Dec 05 11:29:04 crc kubenswrapper[4728]: I1205 11:29:04.697193 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-ca6c-account-create-update-j9pql" Dec 05 11:29:09 crc kubenswrapper[4728]: I1205 11:29:09.764747 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a64af6ac-e922-435f-bee9-1cc7e7a95f4a","Type":"ContainerStarted","Data":"e2acf81ea9f8830672536f376d9aa2ab9b53b0d2a89ba01645aceb7ae13a4d96"} Dec 05 11:29:09 crc kubenswrapper[4728]: I1205 11:29:09.765275 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a64af6ac-e922-435f-bee9-1cc7e7a95f4a","Type":"ContainerStarted","Data":"c2ed1b00b2797076599582e2b297ef8c086a75af57bd5987e19b2bb488ad04aa"} Dec 05 11:29:09 crc kubenswrapper[4728]: I1205 11:29:09.765286 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a64af6ac-e922-435f-bee9-1cc7e7a95f4a","Type":"ContainerStarted","Data":"c665678eb4913eb472bcf3cd771abdcc2e3cff9f1c8b6d29da5491b7d47068c9"} Dec 05 11:29:09 crc kubenswrapper[4728]: I1205 11:29:09.765297 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a64af6ac-e922-435f-bee9-1cc7e7a95f4a","Type":"ContainerStarted","Data":"104712c1759aefbce9bfc0594cbbf510fba90d04c483c471cb78fcc4176d72dc"} Dec 05 11:29:09 crc kubenswrapper[4728]: I1205 11:29:09.765305 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a64af6ac-e922-435f-bee9-1cc7e7a95f4a","Type":"ContainerStarted","Data":"524b320b1a9ecf6d125613af43715a32e27f0ca46261291c75050fa12bb4700a"} Dec 05 11:29:09 crc kubenswrapper[4728]: I1205 11:29:09.767570 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-dmlfm" event={"ID":"540467c2-2e5b-489d-ab98-42b9d580136b","Type":"ContainerStarted","Data":"8aba3552233c2cc4bfb86d76b28eab73d4bfbc4134750835367c51395c5981d0"} Dec 05 11:29:09 crc kubenswrapper[4728]: I1205 11:29:09.787952 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-dmlfm" podStartSLOduration=3.131375541 podStartE2EDuration="9.787935836s" podCreationTimestamp="2025-12-05 11:29:00 +0000 UTC" firstStartedPulling="2025-12-05 11:29:02.013570214 +0000 UTC m=+1276.155692907" lastFinishedPulling="2025-12-05 11:29:08.670130509 +0000 UTC m=+1282.812253202" observedRunningTime="2025-12-05 11:29:09.78316721 +0000 UTC m=+1283.925289903" watchObservedRunningTime="2025-12-05 11:29:09.787935836 +0000 UTC m=+1283.930058529" Dec 05 11:29:12 crc kubenswrapper[4728]: I1205 11:29:12.814446 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a64af6ac-e922-435f-bee9-1cc7e7a95f4a","Type":"ContainerStarted","Data":"14bdfcf7fddee266ee5a0f66464dabc8e2be89cf8bddaf29986a5cda9174c612"} Dec 05 11:29:12 crc kubenswrapper[4728]: I1205 11:29:12.815059 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a64af6ac-e922-435f-bee9-1cc7e7a95f4a","Type":"ContainerStarted","Data":"dca0ed86630ab22fa104ba0e8ef3147696f3a384241d0ad018fb79dd9e879c23"} Dec 05 11:29:12 crc kubenswrapper[4728]: I1205 11:29:12.852130 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=38.432500292 podStartE2EDuration="48.85210618s" podCreationTimestamp="2025-12-05 11:28:24 +0000 UTC" firstStartedPulling="2025-12-05 11:28:58.242999172 +0000 UTC m=+1272.385121865" lastFinishedPulling="2025-12-05 11:29:08.66260506 +0000 UTC 
m=+1282.804727753" observedRunningTime="2025-12-05 11:29:12.844635392 +0000 UTC m=+1286.986758085" watchObservedRunningTime="2025-12-05 11:29:12.85210618 +0000 UTC m=+1286.994228863" Dec 05 11:29:13 crc kubenswrapper[4728]: I1205 11:29:13.112061 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-p5mjl"] Dec 05 11:29:13 crc kubenswrapper[4728]: E1205 11:29:13.112412 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="604ad92d-004e-41d5-9467-f8df44cfd9b2" containerName="mariadb-account-create-update" Dec 05 11:29:13 crc kubenswrapper[4728]: I1205 11:29:13.112426 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="604ad92d-004e-41d5-9467-f8df44cfd9b2" containerName="mariadb-account-create-update" Dec 05 11:29:13 crc kubenswrapper[4728]: E1205 11:29:13.112439 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c98ef602-49f4-4256-8a2d-b6a27ccaf903" containerName="mariadb-account-create-update" Dec 05 11:29:13 crc kubenswrapper[4728]: I1205 11:29:13.112445 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="c98ef602-49f4-4256-8a2d-b6a27ccaf903" containerName="mariadb-account-create-update" Dec 05 11:29:13 crc kubenswrapper[4728]: E1205 11:29:13.112464 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1077e45f-8397-4377-a4e4-6dea1e8f16cb" containerName="mariadb-database-create" Dec 05 11:29:13 crc kubenswrapper[4728]: I1205 11:29:13.112470 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="1077e45f-8397-4377-a4e4-6dea1e8f16cb" containerName="mariadb-database-create" Dec 05 11:29:13 crc kubenswrapper[4728]: E1205 11:29:13.112481 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23d1273f-c19e-48d7-b792-0a6db00bc94d" containerName="mariadb-database-create" Dec 05 11:29:13 crc kubenswrapper[4728]: I1205 11:29:13.112486 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="23d1273f-c19e-48d7-b792-0a6db00bc94d" containerName="mariadb-database-create" Dec 05 11:29:13 crc kubenswrapper[4728]: E1205 11:29:13.112496 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72c64530-79c6-4b13-a18f-70c0c1405d8f" containerName="mariadb-account-create-update" Dec 05 11:29:13 crc kubenswrapper[4728]: I1205 11:29:13.112502 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="72c64530-79c6-4b13-a18f-70c0c1405d8f" containerName="mariadb-account-create-update" Dec 05 11:29:13 crc kubenswrapper[4728]: E1205 11:29:13.112512 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f53576d-4b2d-4269-bea7-c3deb0d3292d" containerName="mariadb-database-create" Dec 05 11:29:13 crc kubenswrapper[4728]: I1205 11:29:13.112517 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f53576d-4b2d-4269-bea7-c3deb0d3292d" containerName="mariadb-database-create" Dec 05 11:29:13 crc kubenswrapper[4728]: E1205 11:29:13.112528 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b96bd60f-bf85-4263-9e18-c367e742c780" containerName="mariadb-database-create" Dec 05 11:29:13 crc kubenswrapper[4728]: I1205 11:29:13.112533 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="b96bd60f-bf85-4263-9e18-c367e742c780" containerName="mariadb-database-create" Dec 05 11:29:13 crc kubenswrapper[4728]: E1205 11:29:13.112552 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09a3c745-2007-4f55-b706-24f148fc1805" containerName="mariadb-account-create-update" Dec 05 11:29:13 crc kubenswrapper[4728]: I1205 11:29:13.112561 
4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="09a3c745-2007-4f55-b706-24f148fc1805" containerName="mariadb-account-create-update" Dec 05 11:29:13 crc kubenswrapper[4728]: I1205 11:29:13.112711 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="604ad92d-004e-41d5-9467-f8df44cfd9b2" containerName="mariadb-account-create-update" Dec 05 11:29:13 crc kubenswrapper[4728]: I1205 11:29:13.112726 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="1077e45f-8397-4377-a4e4-6dea1e8f16cb" containerName="mariadb-database-create" Dec 05 11:29:13 crc kubenswrapper[4728]: I1205 11:29:13.112734 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="23d1273f-c19e-48d7-b792-0a6db00bc94d" containerName="mariadb-database-create" Dec 05 11:29:13 crc kubenswrapper[4728]: I1205 11:29:13.112746 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f53576d-4b2d-4269-bea7-c3deb0d3292d" containerName="mariadb-database-create" Dec 05 11:29:13 crc kubenswrapper[4728]: I1205 11:29:13.112760 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="b96bd60f-bf85-4263-9e18-c367e742c780" containerName="mariadb-database-create" Dec 05 11:29:13 crc kubenswrapper[4728]: I1205 11:29:13.112770 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="09a3c745-2007-4f55-b706-24f148fc1805" containerName="mariadb-account-create-update" Dec 05 11:29:13 crc kubenswrapper[4728]: I1205 11:29:13.112780 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="72c64530-79c6-4b13-a18f-70c0c1405d8f" containerName="mariadb-account-create-update" Dec 05 11:29:13 crc kubenswrapper[4728]: I1205 11:29:13.112805 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="c98ef602-49f4-4256-8a2d-b6a27ccaf903" containerName="mariadb-account-create-update" Dec 05 11:29:13 crc kubenswrapper[4728]: I1205 11:29:13.113566 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-764c5664d7-p5mjl" Dec 05 11:29:13 crc kubenswrapper[4728]: I1205 11:29:13.116401 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Dec 05 11:29:13 crc kubenswrapper[4728]: I1205 11:29:13.128374 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-p5mjl"] Dec 05 11:29:13 crc kubenswrapper[4728]: I1205 11:29:13.239382 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d-dns-svc\") pod \"dnsmasq-dns-764c5664d7-p5mjl\" (UID: \"2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d\") " pod="openstack/dnsmasq-dns-764c5664d7-p5mjl" Dec 05 11:29:13 crc kubenswrapper[4728]: I1205 11:29:13.239437 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d-dns-swift-storage-0\") pod \"dnsmasq-dns-764c5664d7-p5mjl\" (UID: \"2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d\") " pod="openstack/dnsmasq-dns-764c5664d7-p5mjl" Dec 05 11:29:13 crc kubenswrapper[4728]: I1205 11:29:13.239476 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d-config\") pod \"dnsmasq-dns-764c5664d7-p5mjl\" (UID: \"2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d\") " pod="openstack/dnsmasq-dns-764c5664d7-p5mjl" Dec 05 11:29:13 crc kubenswrapper[4728]: I1205 11:29:13.239696 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d-ovsdbserver-nb\") pod \"dnsmasq-dns-764c5664d7-p5mjl\" (UID: \"2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d\") " pod="openstack/dnsmasq-dns-764c5664d7-p5mjl" Dec 05 11:29:13 crc kubenswrapper[4728]: I1205 11:29:13.239860 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d-ovsdbserver-sb\") pod \"dnsmasq-dns-764c5664d7-p5mjl\" (UID: \"2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d\") " pod="openstack/dnsmasq-dns-764c5664d7-p5mjl" Dec 05 11:29:13 crc kubenswrapper[4728]: I1205 11:29:13.240067 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6tv27\" (UniqueName: \"kubernetes.io/projected/2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d-kube-api-access-6tv27\") pod \"dnsmasq-dns-764c5664d7-p5mjl\" (UID: \"2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d\") " pod="openstack/dnsmasq-dns-764c5664d7-p5mjl" Dec 05 11:29:13 crc kubenswrapper[4728]: I1205 11:29:13.341156 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6tv27\" (UniqueName: \"kubernetes.io/projected/2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d-kube-api-access-6tv27\") pod \"dnsmasq-dns-764c5664d7-p5mjl\" (UID: \"2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d\") " pod="openstack/dnsmasq-dns-764c5664d7-p5mjl" Dec 05 11:29:13 crc kubenswrapper[4728]: I1205 11:29:13.341411 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d-dns-svc\") pod \"dnsmasq-dns-764c5664d7-p5mjl\" (UID: 
\"2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d\") " pod="openstack/dnsmasq-dns-764c5664d7-p5mjl" Dec 05 11:29:13 crc kubenswrapper[4728]: I1205 11:29:13.341432 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d-dns-swift-storage-0\") pod \"dnsmasq-dns-764c5664d7-p5mjl\" (UID: \"2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d\") " pod="openstack/dnsmasq-dns-764c5664d7-p5mjl" Dec 05 11:29:13 crc kubenswrapper[4728]: I1205 11:29:13.341460 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d-config\") pod \"dnsmasq-dns-764c5664d7-p5mjl\" (UID: \"2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d\") " pod="openstack/dnsmasq-dns-764c5664d7-p5mjl" Dec 05 11:29:13 crc kubenswrapper[4728]: I1205 11:29:13.341490 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d-ovsdbserver-nb\") pod \"dnsmasq-dns-764c5664d7-p5mjl\" (UID: \"2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d\") " pod="openstack/dnsmasq-dns-764c5664d7-p5mjl" Dec 05 11:29:13 crc kubenswrapper[4728]: I1205 11:29:13.341529 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d-ovsdbserver-sb\") pod \"dnsmasq-dns-764c5664d7-p5mjl\" (UID: \"2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d\") " pod="openstack/dnsmasq-dns-764c5664d7-p5mjl" Dec 05 11:29:13 crc kubenswrapper[4728]: I1205 11:29:13.342292 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d-dns-svc\") pod \"dnsmasq-dns-764c5664d7-p5mjl\" (UID: \"2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d\") " pod="openstack/dnsmasq-dns-764c5664d7-p5mjl" Dec 05 11:29:13 crc kubenswrapper[4728]: I1205 11:29:13.342436 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d-ovsdbserver-sb\") pod \"dnsmasq-dns-764c5664d7-p5mjl\" (UID: \"2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d\") " pod="openstack/dnsmasq-dns-764c5664d7-p5mjl" Dec 05 11:29:13 crc kubenswrapper[4728]: I1205 11:29:13.342489 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d-config\") pod \"dnsmasq-dns-764c5664d7-p5mjl\" (UID: \"2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d\") " pod="openstack/dnsmasq-dns-764c5664d7-p5mjl" Dec 05 11:29:13 crc kubenswrapper[4728]: I1205 11:29:13.342834 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d-ovsdbserver-nb\") pod \"dnsmasq-dns-764c5664d7-p5mjl\" (UID: \"2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d\") " pod="openstack/dnsmasq-dns-764c5664d7-p5mjl" Dec 05 11:29:13 crc kubenswrapper[4728]: I1205 11:29:13.343461 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d-dns-swift-storage-0\") pod \"dnsmasq-dns-764c5664d7-p5mjl\" (UID: \"2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d\") " pod="openstack/dnsmasq-dns-764c5664d7-p5mjl" Dec 05 11:29:13 crc 
kubenswrapper[4728]: I1205 11:29:13.359658 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6tv27\" (UniqueName: \"kubernetes.io/projected/2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d-kube-api-access-6tv27\") pod \"dnsmasq-dns-764c5664d7-p5mjl\" (UID: \"2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d\") " pod="openstack/dnsmasq-dns-764c5664d7-p5mjl" Dec 05 11:29:13 crc kubenswrapper[4728]: I1205 11:29:13.430915 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-764c5664d7-p5mjl" Dec 05 11:29:14 crc kubenswrapper[4728]: I1205 11:29:13.823854 4728 generic.go:334] "Generic (PLEG): container finished" podID="540467c2-2e5b-489d-ab98-42b9d580136b" containerID="8aba3552233c2cc4bfb86d76b28eab73d4bfbc4134750835367c51395c5981d0" exitCode=0 Dec 05 11:29:14 crc kubenswrapper[4728]: I1205 11:29:13.823917 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-dmlfm" event={"ID":"540467c2-2e5b-489d-ab98-42b9d580136b","Type":"ContainerDied","Data":"8aba3552233c2cc4bfb86d76b28eab73d4bfbc4134750835367c51395c5981d0"} Dec 05 11:29:14 crc kubenswrapper[4728]: I1205 11:29:14.391835 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-p5mjl"] Dec 05 11:29:14 crc kubenswrapper[4728]: W1205 11:29:14.401917 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2c1ba8a1_58c2_4266_ab59_5c5c1aa7258d.slice/crio-d2c1a33b5adf2186bc5d493e44a708309a482c950cf693e0eac7079ba7a493b9 WatchSource:0}: Error finding container d2c1a33b5adf2186bc5d493e44a708309a482c950cf693e0eac7079ba7a493b9: Status 404 returned error can't find the container with id d2c1a33b5adf2186bc5d493e44a708309a482c950cf693e0eac7079ba7a493b9 Dec 05 11:29:14 crc kubenswrapper[4728]: I1205 11:29:14.848654 4728 generic.go:334] "Generic (PLEG): container finished" podID="2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d" containerID="ebab175f43921f5c9a2da6346d672521371ec1dbe4362d10d44b380d6793e32e" exitCode=0 Dec 05 11:29:14 crc kubenswrapper[4728]: I1205 11:29:14.848901 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-764c5664d7-p5mjl" event={"ID":"2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d","Type":"ContainerDied","Data":"ebab175f43921f5c9a2da6346d672521371ec1dbe4362d10d44b380d6793e32e"} Dec 05 11:29:14 crc kubenswrapper[4728]: I1205 11:29:14.849641 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-764c5664d7-p5mjl" event={"ID":"2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d","Type":"ContainerStarted","Data":"d2c1a33b5adf2186bc5d493e44a708309a482c950cf693e0eac7079ba7a493b9"} Dec 05 11:29:15 crc kubenswrapper[4728]: I1205 11:29:15.063123 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-dmlfm" Dec 05 11:29:15 crc kubenswrapper[4728]: I1205 11:29:15.170965 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/540467c2-2e5b-489d-ab98-42b9d580136b-config-data\") pod \"540467c2-2e5b-489d-ab98-42b9d580136b\" (UID: \"540467c2-2e5b-489d-ab98-42b9d580136b\") " Dec 05 11:29:15 crc kubenswrapper[4728]: I1205 11:29:15.171004 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/540467c2-2e5b-489d-ab98-42b9d580136b-combined-ca-bundle\") pod \"540467c2-2e5b-489d-ab98-42b9d580136b\" (UID: \"540467c2-2e5b-489d-ab98-42b9d580136b\") " Dec 05 11:29:15 crc kubenswrapper[4728]: I1205 11:29:15.171074 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nwnps\" (UniqueName: \"kubernetes.io/projected/540467c2-2e5b-489d-ab98-42b9d580136b-kube-api-access-nwnps\") pod \"540467c2-2e5b-489d-ab98-42b9d580136b\" (UID: \"540467c2-2e5b-489d-ab98-42b9d580136b\") " Dec 05 11:29:15 crc kubenswrapper[4728]: I1205 11:29:15.174956 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/540467c2-2e5b-489d-ab98-42b9d580136b-kube-api-access-nwnps" (OuterVolumeSpecName: "kube-api-access-nwnps") pod "540467c2-2e5b-489d-ab98-42b9d580136b" (UID: "540467c2-2e5b-489d-ab98-42b9d580136b"). InnerVolumeSpecName "kube-api-access-nwnps". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:29:15 crc kubenswrapper[4728]: I1205 11:29:15.195637 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/540467c2-2e5b-489d-ab98-42b9d580136b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "540467c2-2e5b-489d-ab98-42b9d580136b" (UID: "540467c2-2e5b-489d-ab98-42b9d580136b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:29:15 crc kubenswrapper[4728]: I1205 11:29:15.240697 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/540467c2-2e5b-489d-ab98-42b9d580136b-config-data" (OuterVolumeSpecName: "config-data") pod "540467c2-2e5b-489d-ab98-42b9d580136b" (UID: "540467c2-2e5b-489d-ab98-42b9d580136b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:29:15 crc kubenswrapper[4728]: I1205 11:29:15.273212 4728 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/540467c2-2e5b-489d-ab98-42b9d580136b-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:15 crc kubenswrapper[4728]: I1205 11:29:15.273297 4728 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/540467c2-2e5b-489d-ab98-42b9d580136b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:15 crc kubenswrapper[4728]: I1205 11:29:15.273312 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nwnps\" (UniqueName: \"kubernetes.io/projected/540467c2-2e5b-489d-ab98-42b9d580136b-kube-api-access-nwnps\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:15 crc kubenswrapper[4728]: I1205 11:29:15.860220 4728 generic.go:334] "Generic (PLEG): container finished" podID="30ec6fee-e0fe-471a-a673-7856319a8dd8" containerID="03a65b5ee9a21cbc1b78c58eaf7bd51a3afbf8d82414e0138044b5e4a9c34a49" exitCode=0 Dec 05 11:29:15 crc kubenswrapper[4728]: I1205 11:29:15.860268 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-8fpw9" event={"ID":"30ec6fee-e0fe-471a-a673-7856319a8dd8","Type":"ContainerDied","Data":"03a65b5ee9a21cbc1b78c58eaf7bd51a3afbf8d82414e0138044b5e4a9c34a49"} Dec 05 11:29:15 crc kubenswrapper[4728]: I1205 11:29:15.862558 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-764c5664d7-p5mjl" event={"ID":"2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d","Type":"ContainerStarted","Data":"d2452d93920cec8ea6055fc763ad30008c00d7effbc77e049a164083719ced6d"} Dec 05 11:29:15 crc kubenswrapper[4728]: I1205 11:29:15.862679 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-764c5664d7-p5mjl" Dec 05 11:29:15 crc kubenswrapper[4728]: I1205 11:29:15.865075 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-dmlfm" event={"ID":"540467c2-2e5b-489d-ab98-42b9d580136b","Type":"ContainerDied","Data":"d243da4a1c7789664688db2b51d5122ca024f7376f085731dee6951fcf1885bf"} Dec 05 11:29:15 crc kubenswrapper[4728]: I1205 11:29:15.865104 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d243da4a1c7789664688db2b51d5122ca024f7376f085731dee6951fcf1885bf" Dec 05 11:29:15 crc kubenswrapper[4728]: I1205 11:29:15.865157 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-dmlfm" Dec 05 11:29:15 crc kubenswrapper[4728]: I1205 11:29:15.911687 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-764c5664d7-p5mjl" podStartSLOduration=2.91166404 podStartE2EDuration="2.91166404s" podCreationTimestamp="2025-12-05 11:29:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:29:15.905720053 +0000 UTC m=+1290.047842766" watchObservedRunningTime="2025-12-05 11:29:15.91166404 +0000 UTC m=+1290.053786733" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.028886 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-dlvvg"] Dec 05 11:29:16 crc kubenswrapper[4728]: E1205 11:29:16.029247 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="540467c2-2e5b-489d-ab98-42b9d580136b" containerName="keystone-db-sync" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.029263 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="540467c2-2e5b-489d-ab98-42b9d580136b" containerName="keystone-db-sync" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.029429 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="540467c2-2e5b-489d-ab98-42b9d580136b" containerName="keystone-db-sync" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.029986 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-dlvvg" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.038649 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.038783 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.038971 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.041703 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-rz6qv" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.046424 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.046582 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-p5mjl"] Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.061370 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-dlvvg"] Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.089528 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3b80572c-d607-43b8-85da-02ae5a6ae057-credential-keys\") pod \"keystone-bootstrap-dlvvg\" (UID: \"3b80572c-d607-43b8-85da-02ae5a6ae057\") " pod="openstack/keystone-bootstrap-dlvvg" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.089624 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3b80572c-d607-43b8-85da-02ae5a6ae057-scripts\") pod \"keystone-bootstrap-dlvvg\" (UID: \"3b80572c-d607-43b8-85da-02ae5a6ae057\") " pod="openstack/keystone-bootstrap-dlvvg" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.089659 
4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b80572c-d607-43b8-85da-02ae5a6ae057-config-data\") pod \"keystone-bootstrap-dlvvg\" (UID: \"3b80572c-d607-43b8-85da-02ae5a6ae057\") " pod="openstack/keystone-bootstrap-dlvvg" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.089686 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-stb9m\" (UniqueName: \"kubernetes.io/projected/3b80572c-d607-43b8-85da-02ae5a6ae057-kube-api-access-stb9m\") pod \"keystone-bootstrap-dlvvg\" (UID: \"3b80572c-d607-43b8-85da-02ae5a6ae057\") " pod="openstack/keystone-bootstrap-dlvvg" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.089842 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3b80572c-d607-43b8-85da-02ae5a6ae057-fernet-keys\") pod \"keystone-bootstrap-dlvvg\" (UID: \"3b80572c-d607-43b8-85da-02ae5a6ae057\") " pod="openstack/keystone-bootstrap-dlvvg" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.090006 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b80572c-d607-43b8-85da-02ae5a6ae057-combined-ca-bundle\") pod \"keystone-bootstrap-dlvvg\" (UID: \"3b80572c-d607-43b8-85da-02ae5a6ae057\") " pod="openstack/keystone-bootstrap-dlvvg" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.091422 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5959f8865f-7tpcz"] Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.100408 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5959f8865f-7tpcz" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.157858 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5959f8865f-7tpcz"] Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.191599 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1a448999-fcb4-41e2-bad8-991c8dad1561-dns-swift-storage-0\") pod \"dnsmasq-dns-5959f8865f-7tpcz\" (UID: \"1a448999-fcb4-41e2-bad8-991c8dad1561\") " pod="openstack/dnsmasq-dns-5959f8865f-7tpcz" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.191659 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1a448999-fcb4-41e2-bad8-991c8dad1561-ovsdbserver-sb\") pod \"dnsmasq-dns-5959f8865f-7tpcz\" (UID: \"1a448999-fcb4-41e2-bad8-991c8dad1561\") " pod="openstack/dnsmasq-dns-5959f8865f-7tpcz" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.191683 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hjm4z\" (UniqueName: \"kubernetes.io/projected/1a448999-fcb4-41e2-bad8-991c8dad1561-kube-api-access-hjm4z\") pod \"dnsmasq-dns-5959f8865f-7tpcz\" (UID: \"1a448999-fcb4-41e2-bad8-991c8dad1561\") " pod="openstack/dnsmasq-dns-5959f8865f-7tpcz" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.191725 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3b80572c-d607-43b8-85da-02ae5a6ae057-scripts\") pod \"keystone-bootstrap-dlvvg\" (UID: \"3b80572c-d607-43b8-85da-02ae5a6ae057\") " pod="openstack/keystone-bootstrap-dlvvg" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.191764 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a448999-fcb4-41e2-bad8-991c8dad1561-dns-svc\") pod \"dnsmasq-dns-5959f8865f-7tpcz\" (UID: \"1a448999-fcb4-41e2-bad8-991c8dad1561\") " pod="openstack/dnsmasq-dns-5959f8865f-7tpcz" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.191820 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b80572c-d607-43b8-85da-02ae5a6ae057-config-data\") pod \"keystone-bootstrap-dlvvg\" (UID: \"3b80572c-d607-43b8-85da-02ae5a6ae057\") " pod="openstack/keystone-bootstrap-dlvvg" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.191858 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-stb9m\" (UniqueName: \"kubernetes.io/projected/3b80572c-d607-43b8-85da-02ae5a6ae057-kube-api-access-stb9m\") pod \"keystone-bootstrap-dlvvg\" (UID: \"3b80572c-d607-43b8-85da-02ae5a6ae057\") " pod="openstack/keystone-bootstrap-dlvvg" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.191889 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a448999-fcb4-41e2-bad8-991c8dad1561-config\") pod \"dnsmasq-dns-5959f8865f-7tpcz\" (UID: \"1a448999-fcb4-41e2-bad8-991c8dad1561\") " pod="openstack/dnsmasq-dns-5959f8865f-7tpcz" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.191996 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1a448999-fcb4-41e2-bad8-991c8dad1561-ovsdbserver-nb\") pod \"dnsmasq-dns-5959f8865f-7tpcz\" (UID: \"1a448999-fcb4-41e2-bad8-991c8dad1561\") " pod="openstack/dnsmasq-dns-5959f8865f-7tpcz" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.194906 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3b80572c-d607-43b8-85da-02ae5a6ae057-fernet-keys\") pod \"keystone-bootstrap-dlvvg\" (UID: \"3b80572c-d607-43b8-85da-02ae5a6ae057\") " pod="openstack/keystone-bootstrap-dlvvg" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.194998 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b80572c-d607-43b8-85da-02ae5a6ae057-combined-ca-bundle\") pod \"keystone-bootstrap-dlvvg\" (UID: \"3b80572c-d607-43b8-85da-02ae5a6ae057\") " pod="openstack/keystone-bootstrap-dlvvg" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.195083 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3b80572c-d607-43b8-85da-02ae5a6ae057-credential-keys\") pod \"keystone-bootstrap-dlvvg\" (UID: \"3b80572c-d607-43b8-85da-02ae5a6ae057\") " pod="openstack/keystone-bootstrap-dlvvg" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.200216 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3b80572c-d607-43b8-85da-02ae5a6ae057-fernet-keys\") pod \"keystone-bootstrap-dlvvg\" (UID: \"3b80572c-d607-43b8-85da-02ae5a6ae057\") " pod="openstack/keystone-bootstrap-dlvvg" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.204780 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3b80572c-d607-43b8-85da-02ae5a6ae057-scripts\") pod \"keystone-bootstrap-dlvvg\" (UID: \"3b80572c-d607-43b8-85da-02ae5a6ae057\") " pod="openstack/keystone-bootstrap-dlvvg" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.213491 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b80572c-d607-43b8-85da-02ae5a6ae057-combined-ca-bundle\") pod \"keystone-bootstrap-dlvvg\" (UID: \"3b80572c-d607-43b8-85da-02ae5a6ae057\") " pod="openstack/keystone-bootstrap-dlvvg" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.215000 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3b80572c-d607-43b8-85da-02ae5a6ae057-credential-keys\") pod \"keystone-bootstrap-dlvvg\" (UID: \"3b80572c-d607-43b8-85da-02ae5a6ae057\") " pod="openstack/keystone-bootstrap-dlvvg" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.219688 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b80572c-d607-43b8-85da-02ae5a6ae057-config-data\") pod \"keystone-bootstrap-dlvvg\" (UID: \"3b80572c-d607-43b8-85da-02ae5a6ae057\") " pod="openstack/keystone-bootstrap-dlvvg" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.229715 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-stb9m\" (UniqueName: \"kubernetes.io/projected/3b80572c-d607-43b8-85da-02ae5a6ae057-kube-api-access-stb9m\") pod \"keystone-bootstrap-dlvvg\" (UID: 
\"3b80572c-d607-43b8-85da-02ae5a6ae057\") " pod="openstack/keystone-bootstrap-dlvvg" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.297202 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a448999-fcb4-41e2-bad8-991c8dad1561-dns-svc\") pod \"dnsmasq-dns-5959f8865f-7tpcz\" (UID: \"1a448999-fcb4-41e2-bad8-991c8dad1561\") " pod="openstack/dnsmasq-dns-5959f8865f-7tpcz" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.297510 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a448999-fcb4-41e2-bad8-991c8dad1561-config\") pod \"dnsmasq-dns-5959f8865f-7tpcz\" (UID: \"1a448999-fcb4-41e2-bad8-991c8dad1561\") " pod="openstack/dnsmasq-dns-5959f8865f-7tpcz" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.297566 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1a448999-fcb4-41e2-bad8-991c8dad1561-ovsdbserver-nb\") pod \"dnsmasq-dns-5959f8865f-7tpcz\" (UID: \"1a448999-fcb4-41e2-bad8-991c8dad1561\") " pod="openstack/dnsmasq-dns-5959f8865f-7tpcz" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.297644 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1a448999-fcb4-41e2-bad8-991c8dad1561-dns-swift-storage-0\") pod \"dnsmasq-dns-5959f8865f-7tpcz\" (UID: \"1a448999-fcb4-41e2-bad8-991c8dad1561\") " pod="openstack/dnsmasq-dns-5959f8865f-7tpcz" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.297664 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1a448999-fcb4-41e2-bad8-991c8dad1561-ovsdbserver-sb\") pod \"dnsmasq-dns-5959f8865f-7tpcz\" (UID: \"1a448999-fcb4-41e2-bad8-991c8dad1561\") " pod="openstack/dnsmasq-dns-5959f8865f-7tpcz" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.297678 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hjm4z\" (UniqueName: \"kubernetes.io/projected/1a448999-fcb4-41e2-bad8-991c8dad1561-kube-api-access-hjm4z\") pod \"dnsmasq-dns-5959f8865f-7tpcz\" (UID: \"1a448999-fcb4-41e2-bad8-991c8dad1561\") " pod="openstack/dnsmasq-dns-5959f8865f-7tpcz" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.298646 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a448999-fcb4-41e2-bad8-991c8dad1561-dns-svc\") pod \"dnsmasq-dns-5959f8865f-7tpcz\" (UID: \"1a448999-fcb4-41e2-bad8-991c8dad1561\") " pod="openstack/dnsmasq-dns-5959f8865f-7tpcz" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.299284 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1a448999-fcb4-41e2-bad8-991c8dad1561-dns-swift-storage-0\") pod \"dnsmasq-dns-5959f8865f-7tpcz\" (UID: \"1a448999-fcb4-41e2-bad8-991c8dad1561\") " pod="openstack/dnsmasq-dns-5959f8865f-7tpcz" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.299489 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1a448999-fcb4-41e2-bad8-991c8dad1561-ovsdbserver-nb\") pod \"dnsmasq-dns-5959f8865f-7tpcz\" (UID: \"1a448999-fcb4-41e2-bad8-991c8dad1561\") " pod="openstack/dnsmasq-dns-5959f8865f-7tpcz" 
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.299866 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1a448999-fcb4-41e2-bad8-991c8dad1561-ovsdbserver-sb\") pod \"dnsmasq-dns-5959f8865f-7tpcz\" (UID: \"1a448999-fcb4-41e2-bad8-991c8dad1561\") " pod="openstack/dnsmasq-dns-5959f8865f-7tpcz" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.300165 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a448999-fcb4-41e2-bad8-991c8dad1561-config\") pod \"dnsmasq-dns-5959f8865f-7tpcz\" (UID: \"1a448999-fcb4-41e2-bad8-991c8dad1561\") " pod="openstack/dnsmasq-dns-5959f8865f-7tpcz" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.302219 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-dtppr"] Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.304476 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-dtppr" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.321346 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-5pzsc" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.321512 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.321539 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.353415 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-dlvvg" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.366611 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hjm4z\" (UniqueName: \"kubernetes.io/projected/1a448999-fcb4-41e2-bad8-991c8dad1561-kube-api-access-hjm4z\") pod \"dnsmasq-dns-5959f8865f-7tpcz\" (UID: \"1a448999-fcb4-41e2-bad8-991c8dad1561\") " pod="openstack/dnsmasq-dns-5959f8865f-7tpcz" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.379939 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-847bd6b965-vxxz2"] Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.381458 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-dtppr"] Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.381555 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-847bd6b965-vxxz2" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.390217 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.398750 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/875f6746-18ef-483c-bbb4-80d7dbe4b1a1-scripts\") pod \"cinder-db-sync-dtppr\" (UID: \"875f6746-18ef-483c-bbb4-80d7dbe4b1a1\") " pod="openstack/cinder-db-sync-dtppr" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.398830 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/875f6746-18ef-483c-bbb4-80d7dbe4b1a1-config-data\") pod \"cinder-db-sync-dtppr\" (UID: \"875f6746-18ef-483c-bbb4-80d7dbe4b1a1\") " pod="openstack/cinder-db-sync-dtppr" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.398884 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/875f6746-18ef-483c-bbb4-80d7dbe4b1a1-combined-ca-bundle\") pod \"cinder-db-sync-dtppr\" (UID: \"875f6746-18ef-483c-bbb4-80d7dbe4b1a1\") " pod="openstack/cinder-db-sync-dtppr" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.398904 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/875f6746-18ef-483c-bbb4-80d7dbe4b1a1-etc-machine-id\") pod \"cinder-db-sync-dtppr\" (UID: \"875f6746-18ef-483c-bbb4-80d7dbe4b1a1\") " pod="openstack/cinder-db-sync-dtppr" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.398928 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6wz7k\" (UniqueName: \"kubernetes.io/projected/875f6746-18ef-483c-bbb4-80d7dbe4b1a1-kube-api-access-6wz7k\") pod \"cinder-db-sync-dtppr\" (UID: \"875f6746-18ef-483c-bbb4-80d7dbe4b1a1\") " pod="openstack/cinder-db-sync-dtppr" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.398957 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/875f6746-18ef-483c-bbb4-80d7dbe4b1a1-db-sync-config-data\") pod \"cinder-db-sync-dtppr\" (UID: \"875f6746-18ef-483c-bbb4-80d7dbe4b1a1\") " pod="openstack/cinder-db-sync-dtppr" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.404668 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.416972 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-lwndl" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.417992 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.423205 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5959f8865f-7tpcz" Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.424886 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-j552g"] Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.426166 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-j552g"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.445203 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-656mz"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.445381 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.445498 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.452842 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-j552g"]
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.467437 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-847bd6b965-vxxz2"]
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.490764 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-db-sync-zbbv6"]
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.492650 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-zbbv6"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.501779 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-manila-dockercfg-75ffx"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.502062 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-config-data"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.502956 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/875f6746-18ef-483c-bbb4-80d7dbe4b1a1-scripts\") pod \"cinder-db-sync-dtppr\" (UID: \"875f6746-18ef-483c-bbb4-80d7dbe4b1a1\") " pod="openstack/cinder-db-sync-dtppr"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.502988 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/875f6746-18ef-483c-bbb4-80d7dbe4b1a1-config-data\") pod \"cinder-db-sync-dtppr\" (UID: \"875f6746-18ef-483c-bbb4-80d7dbe4b1a1\") " pod="openstack/cinder-db-sync-dtppr"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.503031 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wrj6f\" (UniqueName: \"kubernetes.io/projected/9e792568-52ab-4080-bd08-8d6ef2f15ee7-kube-api-access-wrj6f\") pod \"neutron-db-sync-j552g\" (UID: \"9e792568-52ab-4080-bd08-8d6ef2f15ee7\") " pod="openstack/neutron-db-sync-j552g"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.503048 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/55ead1aa-62ac-4873-9d68-d409a2823f8c-config-data\") pod \"horizon-847bd6b965-vxxz2\" (UID: \"55ead1aa-62ac-4873-9d68-d409a2823f8c\") " pod="openstack/horizon-847bd6b965-vxxz2"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.503080 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/875f6746-18ef-483c-bbb4-80d7dbe4b1a1-combined-ca-bundle\") pod \"cinder-db-sync-dtppr\" (UID: \"875f6746-18ef-483c-bbb4-80d7dbe4b1a1\") " pod="openstack/cinder-db-sync-dtppr"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.503096 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/875f6746-18ef-483c-bbb4-80d7dbe4b1a1-etc-machine-id\") pod \"cinder-db-sync-dtppr\" (UID: \"875f6746-18ef-483c-bbb4-80d7dbe4b1a1\") " pod="openstack/cinder-db-sync-dtppr"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.503120 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/55ead1aa-62ac-4873-9d68-d409a2823f8c-scripts\") pod \"horizon-847bd6b965-vxxz2\" (UID: \"55ead1aa-62ac-4873-9d68-d409a2823f8c\") " pod="openstack/horizon-847bd6b965-vxxz2"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.503141 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6wz7k\" (UniqueName: \"kubernetes.io/projected/875f6746-18ef-483c-bbb4-80d7dbe4b1a1-kube-api-access-6wz7k\") pod \"cinder-db-sync-dtppr\" (UID: \"875f6746-18ef-483c-bbb4-80d7dbe4b1a1\") " pod="openstack/cinder-db-sync-dtppr"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.503159 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/875f6746-18ef-483c-bbb4-80d7dbe4b1a1-db-sync-config-data\") pod \"cinder-db-sync-dtppr\" (UID: \"875f6746-18ef-483c-bbb4-80d7dbe4b1a1\") " pod="openstack/cinder-db-sync-dtppr"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.503185 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/55ead1aa-62ac-4873-9d68-d409a2823f8c-logs\") pod \"horizon-847bd6b965-vxxz2\" (UID: \"55ead1aa-62ac-4873-9d68-d409a2823f8c\") " pod="openstack/horizon-847bd6b965-vxxz2"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.503206 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/55ead1aa-62ac-4873-9d68-d409a2823f8c-horizon-secret-key\") pod \"horizon-847bd6b965-vxxz2\" (UID: \"55ead1aa-62ac-4873-9d68-d409a2823f8c\") " pod="openstack/horizon-847bd6b965-vxxz2"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.503223 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rr8mw\" (UniqueName: \"kubernetes.io/projected/55ead1aa-62ac-4873-9d68-d409a2823f8c-kube-api-access-rr8mw\") pod \"horizon-847bd6b965-vxxz2\" (UID: \"55ead1aa-62ac-4873-9d68-d409a2823f8c\") " pod="openstack/horizon-847bd6b965-vxxz2"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.503259 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e792568-52ab-4080-bd08-8d6ef2f15ee7-combined-ca-bundle\") pod \"neutron-db-sync-j552g\" (UID: \"9e792568-52ab-4080-bd08-8d6ef2f15ee7\") " pod="openstack/neutron-db-sync-j552g"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.503282 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/9e792568-52ab-4080-bd08-8d6ef2f15ee7-config\") pod \"neutron-db-sync-j552g\" (UID: \"9e792568-52ab-4080-bd08-8d6ef2f15ee7\") " pod="openstack/neutron-db-sync-j552g"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.513965 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-sync-zbbv6"]
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.514396 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/875f6746-18ef-483c-bbb4-80d7dbe4b1a1-etc-machine-id\") pod \"cinder-db-sync-dtppr\" (UID: \"875f6746-18ef-483c-bbb4-80d7dbe4b1a1\") " pod="openstack/cinder-db-sync-dtppr"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.518470 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/875f6746-18ef-483c-bbb4-80d7dbe4b1a1-db-sync-config-data\") pod \"cinder-db-sync-dtppr\" (UID: \"875f6746-18ef-483c-bbb4-80d7dbe4b1a1\") " pod="openstack/cinder-db-sync-dtppr"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.523947 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/875f6746-18ef-483c-bbb4-80d7dbe4b1a1-combined-ca-bundle\") pod \"cinder-db-sync-dtppr\" (UID: \"875f6746-18ef-483c-bbb4-80d7dbe4b1a1\") " pod="openstack/cinder-db-sync-dtppr"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.524433 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/875f6746-18ef-483c-bbb4-80d7dbe4b1a1-config-data\") pod \"cinder-db-sync-dtppr\" (UID: \"875f6746-18ef-483c-bbb4-80d7dbe4b1a1\") " pod="openstack/cinder-db-sync-dtppr"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.537857 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/875f6746-18ef-483c-bbb4-80d7dbe4b1a1-scripts\") pod \"cinder-db-sync-dtppr\" (UID: \"875f6746-18ef-483c-bbb4-80d7dbe4b1a1\") " pod="openstack/cinder-db-sync-dtppr"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.548366 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6wz7k\" (UniqueName: \"kubernetes.io/projected/875f6746-18ef-483c-bbb4-80d7dbe4b1a1-kube-api-access-6wz7k\") pod \"cinder-db-sync-dtppr\" (UID: \"875f6746-18ef-483c-bbb4-80d7dbe4b1a1\") " pod="openstack/cinder-db-sync-dtppr"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.586950 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-fm2df"]
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.588237 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-fm2df"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.593786 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.594804 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-vxzsz"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.604902 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/55ead1aa-62ac-4873-9d68-d409a2823f8c-logs\") pod \"horizon-847bd6b965-vxxz2\" (UID: \"55ead1aa-62ac-4873-9d68-d409a2823f8c\") " pod="openstack/horizon-847bd6b965-vxxz2"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.604951 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/55ead1aa-62ac-4873-9d68-d409a2823f8c-horizon-secret-key\") pod \"horizon-847bd6b965-vxxz2\" (UID: \"55ead1aa-62ac-4873-9d68-d409a2823f8c\") " pod="openstack/horizon-847bd6b965-vxxz2"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.604970 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rr8mw\" (UniqueName: \"kubernetes.io/projected/55ead1aa-62ac-4873-9d68-d409a2823f8c-kube-api-access-rr8mw\") pod \"horizon-847bd6b965-vxxz2\" (UID: \"55ead1aa-62ac-4873-9d68-d409a2823f8c\") " pod="openstack/horizon-847bd6b965-vxxz2"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.605005 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-td8b6\" (UniqueName: \"kubernetes.io/projected/537c7276-c2c9-4427-9b2b-5e835e3bc2d7-kube-api-access-td8b6\") pod \"manila-db-sync-zbbv6\" (UID: \"537c7276-c2c9-4427-9b2b-5e835e3bc2d7\") " pod="openstack/manila-db-sync-zbbv6"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.605035 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e792568-52ab-4080-bd08-8d6ef2f15ee7-combined-ca-bundle\") pod \"neutron-db-sync-j552g\" (UID: \"9e792568-52ab-4080-bd08-8d6ef2f15ee7\") " pod="openstack/neutron-db-sync-j552g"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.605050 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/537c7276-c2c9-4427-9b2b-5e835e3bc2d7-config-data\") pod \"manila-db-sync-zbbv6\" (UID: \"537c7276-c2c9-4427-9b2b-5e835e3bc2d7\") " pod="openstack/manila-db-sync-zbbv6"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.605085 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/9e792568-52ab-4080-bd08-8d6ef2f15ee7-config\") pod \"neutron-db-sync-j552g\" (UID: \"9e792568-52ab-4080-bd08-8d6ef2f15ee7\") " pod="openstack/neutron-db-sync-j552g"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.605100 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/537c7276-c2c9-4427-9b2b-5e835e3bc2d7-job-config-data\") pod \"manila-db-sync-zbbv6\" (UID: \"537c7276-c2c9-4427-9b2b-5e835e3bc2d7\") " pod="openstack/manila-db-sync-zbbv6"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.605129 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/537c7276-c2c9-4427-9b2b-5e835e3bc2d7-combined-ca-bundle\") pod \"manila-db-sync-zbbv6\" (UID: \"537c7276-c2c9-4427-9b2b-5e835e3bc2d7\") " pod="openstack/manila-db-sync-zbbv6"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.605179 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wrj6f\" (UniqueName: \"kubernetes.io/projected/9e792568-52ab-4080-bd08-8d6ef2f15ee7-kube-api-access-wrj6f\") pod \"neutron-db-sync-j552g\" (UID: \"9e792568-52ab-4080-bd08-8d6ef2f15ee7\") " pod="openstack/neutron-db-sync-j552g"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.605194 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/55ead1aa-62ac-4873-9d68-d409a2823f8c-config-data\") pod \"horizon-847bd6b965-vxxz2\" (UID: \"55ead1aa-62ac-4873-9d68-d409a2823f8c\") " pod="openstack/horizon-847bd6b965-vxxz2"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.605229 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/55ead1aa-62ac-4873-9d68-d409a2823f8c-scripts\") pod \"horizon-847bd6b965-vxxz2\" (UID: \"55ead1aa-62ac-4873-9d68-d409a2823f8c\") " pod="openstack/horizon-847bd6b965-vxxz2"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.610880 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/55ead1aa-62ac-4873-9d68-d409a2823f8c-scripts\") pod \"horizon-847bd6b965-vxxz2\" (UID: \"55ead1aa-62ac-4873-9d68-d409a2823f8c\") " pod="openstack/horizon-847bd6b965-vxxz2"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.615213 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e792568-52ab-4080-bd08-8d6ef2f15ee7-combined-ca-bundle\") pod \"neutron-db-sync-j552g\" (UID: \"9e792568-52ab-4080-bd08-8d6ef2f15ee7\") " pod="openstack/neutron-db-sync-j552g"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.621002 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/55ead1aa-62ac-4873-9d68-d409a2823f8c-config-data\") pod \"horizon-847bd6b965-vxxz2\" (UID: \"55ead1aa-62ac-4873-9d68-d409a2823f8c\") " pod="openstack/horizon-847bd6b965-vxxz2"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.621069 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/55ead1aa-62ac-4873-9d68-d409a2823f8c-logs\") pod \"horizon-847bd6b965-vxxz2\" (UID: \"55ead1aa-62ac-4873-9d68-d409a2823f8c\") " pod="openstack/horizon-847bd6b965-vxxz2"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.622031 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-dtppr"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.625708 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/9e792568-52ab-4080-bd08-8d6ef2f15ee7-config\") pod \"neutron-db-sync-j552g\" (UID: \"9e792568-52ab-4080-bd08-8d6ef2f15ee7\") " pod="openstack/neutron-db-sync-j552g"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.626014 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/55ead1aa-62ac-4873-9d68-d409a2823f8c-horizon-secret-key\") pod \"horizon-847bd6b965-vxxz2\" (UID: \"55ead1aa-62ac-4873-9d68-d409a2823f8c\") " pod="openstack/horizon-847bd6b965-vxxz2"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.661336 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-fm2df"]
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.664589 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wrj6f\" (UniqueName: \"kubernetes.io/projected/9e792568-52ab-4080-bd08-8d6ef2f15ee7-kube-api-access-wrj6f\") pod \"neutron-db-sync-j552g\" (UID: \"9e792568-52ab-4080-bd08-8d6ef2f15ee7\") " pod="openstack/neutron-db-sync-j552g"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.664711 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rr8mw\" (UniqueName: \"kubernetes.io/projected/55ead1aa-62ac-4873-9d68-d409a2823f8c-kube-api-access-rr8mw\") pod \"horizon-847bd6b965-vxxz2\" (UID: \"55ead1aa-62ac-4873-9d68-d409a2823f8c\") " pod="openstack/horizon-847bd6b965-vxxz2"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.708387 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qkvlk\" (UniqueName: \"kubernetes.io/projected/555e531f-162f-4097-ba36-53b6ddedd6d8-kube-api-access-qkvlk\") pod \"barbican-db-sync-fm2df\" (UID: \"555e531f-162f-4097-ba36-53b6ddedd6d8\") " pod="openstack/barbican-db-sync-fm2df"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.708479 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-td8b6\" (UniqueName: \"kubernetes.io/projected/537c7276-c2c9-4427-9b2b-5e835e3bc2d7-kube-api-access-td8b6\") pod \"manila-db-sync-zbbv6\" (UID: \"537c7276-c2c9-4427-9b2b-5e835e3bc2d7\") " pod="openstack/manila-db-sync-zbbv6"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.708514 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/537c7276-c2c9-4427-9b2b-5e835e3bc2d7-config-data\") pod \"manila-db-sync-zbbv6\" (UID: \"537c7276-c2c9-4427-9b2b-5e835e3bc2d7\") " pod="openstack/manila-db-sync-zbbv6"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.708550 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/537c7276-c2c9-4427-9b2b-5e835e3bc2d7-job-config-data\") pod \"manila-db-sync-zbbv6\" (UID: \"537c7276-c2c9-4427-9b2b-5e835e3bc2d7\") " pod="openstack/manila-db-sync-zbbv6"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.708579 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/537c7276-c2c9-4427-9b2b-5e835e3bc2d7-combined-ca-bundle\") pod \"manila-db-sync-zbbv6\" (UID: \"537c7276-c2c9-4427-9b2b-5e835e3bc2d7\") " pod="openstack/manila-db-sync-zbbv6"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.708616 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/555e531f-162f-4097-ba36-53b6ddedd6d8-combined-ca-bundle\") pod \"barbican-db-sync-fm2df\" (UID: \"555e531f-162f-4097-ba36-53b6ddedd6d8\") " pod="openstack/barbican-db-sync-fm2df"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.708642 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/555e531f-162f-4097-ba36-53b6ddedd6d8-db-sync-config-data\") pod \"barbican-db-sync-fm2df\" (UID: \"555e531f-162f-4097-ba36-53b6ddedd6d8\") " pod="openstack/barbican-db-sync-fm2df"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.716504 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/537c7276-c2c9-4427-9b2b-5e835e3bc2d7-combined-ca-bundle\") pod \"manila-db-sync-zbbv6\" (UID: \"537c7276-c2c9-4427-9b2b-5e835e3bc2d7\") " pod="openstack/manila-db-sync-zbbv6"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.725362 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/537c7276-c2c9-4427-9b2b-5e835e3bc2d7-job-config-data\") pod \"manila-db-sync-zbbv6\" (UID: \"537c7276-c2c9-4427-9b2b-5e835e3bc2d7\") " pod="openstack/manila-db-sync-zbbv6"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.747532 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-td8b6\" (UniqueName: \"kubernetes.io/projected/537c7276-c2c9-4427-9b2b-5e835e3bc2d7-kube-api-access-td8b6\") pod \"manila-db-sync-zbbv6\" (UID: \"537c7276-c2c9-4427-9b2b-5e835e3bc2d7\") " pod="openstack/manila-db-sync-zbbv6"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.769020 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/537c7276-c2c9-4427-9b2b-5e835e3bc2d7-config-data\") pod \"manila-db-sync-zbbv6\" (UID: \"537c7276-c2c9-4427-9b2b-5e835e3bc2d7\") " pod="openstack/manila-db-sync-zbbv6"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.796859 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-766d597b85-nkvzq"]
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.801145 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-766d597b85-nkvzq"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.812438 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/555e531f-162f-4097-ba36-53b6ddedd6d8-db-sync-config-data\") pod \"barbican-db-sync-fm2df\" (UID: \"555e531f-162f-4097-ba36-53b6ddedd6d8\") " pod="openstack/barbican-db-sync-fm2df"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.812506 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qkvlk\" (UniqueName: \"kubernetes.io/projected/555e531f-162f-4097-ba36-53b6ddedd6d8-kube-api-access-qkvlk\") pod \"barbican-db-sync-fm2df\" (UID: \"555e531f-162f-4097-ba36-53b6ddedd6d8\") " pod="openstack/barbican-db-sync-fm2df"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.812672 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/555e531f-162f-4097-ba36-53b6ddedd6d8-combined-ca-bundle\") pod \"barbican-db-sync-fm2df\" (UID: \"555e531f-162f-4097-ba36-53b6ddedd6d8\") " pod="openstack/barbican-db-sync-fm2df"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.813648 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-766d597b85-nkvzq"]
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.834186 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.835708 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/555e531f-162f-4097-ba36-53b6ddedd6d8-db-sync-config-data\") pod \"barbican-db-sync-fm2df\" (UID: \"555e531f-162f-4097-ba36-53b6ddedd6d8\") " pod="openstack/barbican-db-sync-fm2df"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.839327 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.840067 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.846462 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/555e531f-162f-4097-ba36-53b6ddedd6d8-combined-ca-bundle\") pod \"barbican-db-sync-fm2df\" (UID: \"555e531f-162f-4097-ba36-53b6ddedd6d8\") " pod="openstack/barbican-db-sync-fm2df"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.850189 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.850518 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.851027 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qkvlk\" (UniqueName: \"kubernetes.io/projected/555e531f-162f-4097-ba36-53b6ddedd6d8-kube-api-access-qkvlk\") pod \"barbican-db-sync-fm2df\" (UID: \"555e531f-162f-4097-ba36-53b6ddedd6d8\") " pod="openstack/barbican-db-sync-fm2df"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.854523 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5959f8865f-7tpcz"]
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.867731 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-58dd9ff6bc-2tx9v"]
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.869267 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-58dd9ff6bc-2tx9v"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.891957 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-sg5mr"]
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.893710 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-sg5mr"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.900064 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.900167 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-v7fpr"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.902987 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.927759 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-j552g"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.932655 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-847bd6b965-vxxz2"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.937497 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dbac4a04-13e4-43e5-a221-3faec9e9fac7-log-httpd\") pod \"ceilometer-0\" (UID: \"dbac4a04-13e4-43e5-a221-3faec9e9fac7\") " pod="openstack/ceilometer-0"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.937591 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v5nvc\" (UniqueName: \"kubernetes.io/projected/dbac4a04-13e4-43e5-a221-3faec9e9fac7-kube-api-access-v5nvc\") pod \"ceilometer-0\" (UID: \"dbac4a04-13e4-43e5-a221-3faec9e9fac7\") " pod="openstack/ceilometer-0"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.937615 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dbac4a04-13e4-43e5-a221-3faec9e9fac7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"dbac4a04-13e4-43e5-a221-3faec9e9fac7\") " pod="openstack/ceilometer-0"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.937642 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dbac4a04-13e4-43e5-a221-3faec9e9fac7-config-data\") pod \"ceilometer-0\" (UID: \"dbac4a04-13e4-43e5-a221-3faec9e9fac7\") " pod="openstack/ceilometer-0"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.937704 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b59078b7-77d3-42a5-8ce6-9ef6107377b5-dns-svc\") pod \"dnsmasq-dns-58dd9ff6bc-2tx9v\" (UID: \"b59078b7-77d3-42a5-8ce6-9ef6107377b5\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-2tx9v"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.937742 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mmqct\" (UniqueName: \"kubernetes.io/projected/b59078b7-77d3-42a5-8ce6-9ef6107377b5-kube-api-access-mmqct\") pod \"dnsmasq-dns-58dd9ff6bc-2tx9v\" (UID: \"b59078b7-77d3-42a5-8ce6-9ef6107377b5\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-2tx9v"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.937764 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bae2c0c5-184c-4c07-890e-2ba44bf39533-logs\") pod \"horizon-766d597b85-nkvzq\" (UID: \"bae2c0c5-184c-4c07-890e-2ba44bf39533\") " pod="openstack/horizon-766d597b85-nkvzq"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.937819 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b59078b7-77d3-42a5-8ce6-9ef6107377b5-dns-swift-storage-0\") pod \"dnsmasq-dns-58dd9ff6bc-2tx9v\" (UID: \"b59078b7-77d3-42a5-8ce6-9ef6107377b5\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-2tx9v"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.937910 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dbac4a04-13e4-43e5-a221-3faec9e9fac7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"dbac4a04-13e4-43e5-a221-3faec9e9fac7\") " pod="openstack/ceilometer-0"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.937979 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b59078b7-77d3-42a5-8ce6-9ef6107377b5-ovsdbserver-nb\") pod \"dnsmasq-dns-58dd9ff6bc-2tx9v\" (UID: \"b59078b7-77d3-42a5-8ce6-9ef6107377b5\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-2tx9v"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.938030 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dbac4a04-13e4-43e5-a221-3faec9e9fac7-scripts\") pod \"ceilometer-0\" (UID: \"dbac4a04-13e4-43e5-a221-3faec9e9fac7\") " pod="openstack/ceilometer-0"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.938059 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b59078b7-77d3-42a5-8ce6-9ef6107377b5-ovsdbserver-sb\") pod \"dnsmasq-dns-58dd9ff6bc-2tx9v\" (UID: \"b59078b7-77d3-42a5-8ce6-9ef6107377b5\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-2tx9v"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.938082 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/bae2c0c5-184c-4c07-890e-2ba44bf39533-horizon-secret-key\") pod \"horizon-766d597b85-nkvzq\" (UID: \"bae2c0c5-184c-4c07-890e-2ba44bf39533\") " pod="openstack/horizon-766d597b85-nkvzq"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.938116 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/bae2c0c5-184c-4c07-890e-2ba44bf39533-config-data\") pod \"horizon-766d597b85-nkvzq\" (UID: \"bae2c0c5-184c-4c07-890e-2ba44bf39533\") " pod="openstack/horizon-766d597b85-nkvzq"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.938153 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bae2c0c5-184c-4c07-890e-2ba44bf39533-scripts\") pod \"horizon-766d597b85-nkvzq\" (UID: \"bae2c0c5-184c-4c07-890e-2ba44bf39533\") " pod="openstack/horizon-766d597b85-nkvzq"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.938214 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b59078b7-77d3-42a5-8ce6-9ef6107377b5-config\") pod \"dnsmasq-dns-58dd9ff6bc-2tx9v\" (UID: \"b59078b7-77d3-42a5-8ce6-9ef6107377b5\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-2tx9v"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.938253 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qtkjw\" (UniqueName: \"kubernetes.io/projected/bae2c0c5-184c-4c07-890e-2ba44bf39533-kube-api-access-qtkjw\") pod \"horizon-766d597b85-nkvzq\" (UID: \"bae2c0c5-184c-4c07-890e-2ba44bf39533\") " pod="openstack/horizon-766d597b85-nkvzq"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.938314 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dbac4a04-13e4-43e5-a221-3faec9e9fac7-run-httpd\") pod \"ceilometer-0\" (UID: \"dbac4a04-13e4-43e5-a221-3faec9e9fac7\") " pod="openstack/ceilometer-0"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.961854 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-zbbv6"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.962329 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-sg5mr"]
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.988334 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-fm2df"
Dec 05 11:29:16 crc kubenswrapper[4728]: I1205 11:29:16.990888 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-58dd9ff6bc-2tx9v"]
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.046528 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/198c5a44-d3b2-4afd-b034-d898309e0f42-scripts\") pod \"placement-db-sync-sg5mr\" (UID: \"198c5a44-d3b2-4afd-b034-d898309e0f42\") " pod="openstack/placement-db-sync-sg5mr"
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.046752 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dbac4a04-13e4-43e5-a221-3faec9e9fac7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"dbac4a04-13e4-43e5-a221-3faec9e9fac7\") " pod="openstack/ceilometer-0"
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.046778 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w59rs\" (UniqueName: \"kubernetes.io/projected/198c5a44-d3b2-4afd-b034-d898309e0f42-kube-api-access-w59rs\") pod \"placement-db-sync-sg5mr\" (UID: \"198c5a44-d3b2-4afd-b034-d898309e0f42\") " pod="openstack/placement-db-sync-sg5mr"
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.046824 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b59078b7-77d3-42a5-8ce6-9ef6107377b5-ovsdbserver-nb\") pod \"dnsmasq-dns-58dd9ff6bc-2tx9v\" (UID: \"b59078b7-77d3-42a5-8ce6-9ef6107377b5\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-2tx9v"
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.046849 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b59078b7-77d3-42a5-8ce6-9ef6107377b5-ovsdbserver-sb\") pod \"dnsmasq-dns-58dd9ff6bc-2tx9v\" (UID: \"b59078b7-77d3-42a5-8ce6-9ef6107377b5\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-2tx9v"
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.046863 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dbac4a04-13e4-43e5-a221-3faec9e9fac7-scripts\") pod \"ceilometer-0\" (UID: \"dbac4a04-13e4-43e5-a221-3faec9e9fac7\") " pod="openstack/ceilometer-0"
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.046881 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/bae2c0c5-184c-4c07-890e-2ba44bf39533-horizon-secret-key\") pod \"horizon-766d597b85-nkvzq\" (UID: \"bae2c0c5-184c-4c07-890e-2ba44bf39533\") " pod="openstack/horizon-766d597b85-nkvzq"
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.046906 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/198c5a44-d3b2-4afd-b034-d898309e0f42-logs\") pod \"placement-db-sync-sg5mr\" (UID: \"198c5a44-d3b2-4afd-b034-d898309e0f42\") " pod="openstack/placement-db-sync-sg5mr"
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.046922 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/bae2c0c5-184c-4c07-890e-2ba44bf39533-config-data\") pod \"horizon-766d597b85-nkvzq\" (UID: \"bae2c0c5-184c-4c07-890e-2ba44bf39533\") " pod="openstack/horizon-766d597b85-nkvzq"
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.046944 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bae2c0c5-184c-4c07-890e-2ba44bf39533-scripts\") pod \"horizon-766d597b85-nkvzq\" (UID: \"bae2c0c5-184c-4c07-890e-2ba44bf39533\") " pod="openstack/horizon-766d597b85-nkvzq"
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.046978 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b59078b7-77d3-42a5-8ce6-9ef6107377b5-config\") pod \"dnsmasq-dns-58dd9ff6bc-2tx9v\" (UID: \"b59078b7-77d3-42a5-8ce6-9ef6107377b5\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-2tx9v"
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.046992 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/198c5a44-d3b2-4afd-b034-d898309e0f42-combined-ca-bundle\") pod \"placement-db-sync-sg5mr\" (UID: \"198c5a44-d3b2-4afd-b034-d898309e0f42\") " pod="openstack/placement-db-sync-sg5mr"
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.047013 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qtkjw\" (UniqueName: \"kubernetes.io/projected/bae2c0c5-184c-4c07-890e-2ba44bf39533-kube-api-access-qtkjw\") pod \"horizon-766d597b85-nkvzq\" (UID: \"bae2c0c5-184c-4c07-890e-2ba44bf39533\") " pod="openstack/horizon-766d597b85-nkvzq"
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.047027 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/198c5a44-d3b2-4afd-b034-d898309e0f42-config-data\") pod \"placement-db-sync-sg5mr\" (UID: \"198c5a44-d3b2-4afd-b034-d898309e0f42\") " pod="openstack/placement-db-sync-sg5mr"
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.047056 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dbac4a04-13e4-43e5-a221-3faec9e9fac7-run-httpd\") pod \"ceilometer-0\" (UID: \"dbac4a04-13e4-43e5-a221-3faec9e9fac7\") " pod="openstack/ceilometer-0"
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.047077 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dbac4a04-13e4-43e5-a221-3faec9e9fac7-log-httpd\") pod \"ceilometer-0\" (UID: \"dbac4a04-13e4-43e5-a221-3faec9e9fac7\") " pod="openstack/ceilometer-0"
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.047100 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v5nvc\" (UniqueName: \"kubernetes.io/projected/dbac4a04-13e4-43e5-a221-3faec9e9fac7-kube-api-access-v5nvc\") pod \"ceilometer-0\" (UID: \"dbac4a04-13e4-43e5-a221-3faec9e9fac7\") " pod="openstack/ceilometer-0"
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.047115 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dbac4a04-13e4-43e5-a221-3faec9e9fac7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"dbac4a04-13e4-43e5-a221-3faec9e9fac7\") " pod="openstack/ceilometer-0"
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.047129 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dbac4a04-13e4-43e5-a221-3faec9e9fac7-config-data\") pod \"ceilometer-0\" (UID: \"dbac4a04-13e4-43e5-a221-3faec9e9fac7\") " pod="openstack/ceilometer-0"
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.047162 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b59078b7-77d3-42a5-8ce6-9ef6107377b5-dns-svc\") pod \"dnsmasq-dns-58dd9ff6bc-2tx9v\" (UID: \"b59078b7-77d3-42a5-8ce6-9ef6107377b5\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-2tx9v"
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.047179 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mmqct\" (UniqueName: \"kubernetes.io/projected/b59078b7-77d3-42a5-8ce6-9ef6107377b5-kube-api-access-mmqct\") pod \"dnsmasq-dns-58dd9ff6bc-2tx9v\" (UID: \"b59078b7-77d3-42a5-8ce6-9ef6107377b5\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-2tx9v"
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.047194 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bae2c0c5-184c-4c07-890e-2ba44bf39533-logs\") pod \"horizon-766d597b85-nkvzq\" (UID: \"bae2c0c5-184c-4c07-890e-2ba44bf39533\") " pod="openstack/horizon-766d597b85-nkvzq"
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.047235 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b59078b7-77d3-42a5-8ce6-9ef6107377b5-dns-swift-storage-0\") pod \"dnsmasq-dns-58dd9ff6bc-2tx9v\" (UID: \"b59078b7-77d3-42a5-8ce6-9ef6107377b5\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-2tx9v"
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.048035 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b59078b7-77d3-42a5-8ce6-9ef6107377b5-dns-swift-storage-0\") pod \"dnsmasq-dns-58dd9ff6bc-2tx9v\" (UID: \"b59078b7-77d3-42a5-8ce6-9ef6107377b5\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-2tx9v"
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.052041 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b59078b7-77d3-42a5-8ce6-9ef6107377b5-ovsdbserver-nb\") pod \"dnsmasq-dns-58dd9ff6bc-2tx9v\" (UID: \"b59078b7-77d3-42a5-8ce6-9ef6107377b5\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-2tx9v"
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.052571 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b59078b7-77d3-42a5-8ce6-9ef6107377b5-ovsdbserver-sb\") pod \"dnsmasq-dns-58dd9ff6bc-2tx9v\" (UID: \"b59078b7-77d3-42a5-8ce6-9ef6107377b5\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-2tx9v"
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.054718 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b59078b7-77d3-42a5-8ce6-9ef6107377b5-dns-svc\") pod \"dnsmasq-dns-58dd9ff6bc-2tx9v\" (UID: \"b59078b7-77d3-42a5-8ce6-9ef6107377b5\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-2tx9v"
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.058548 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dbac4a04-13e4-43e5-a221-3faec9e9fac7-scripts\") pod \"ceilometer-0\" (UID: \"dbac4a04-13e4-43e5-a221-3faec9e9fac7\") " pod="openstack/ceilometer-0"
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.061611 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bae2c0c5-184c-4c07-890e-2ba44bf39533-logs\") pod \"horizon-766d597b85-nkvzq\" (UID: \"bae2c0c5-184c-4c07-890e-2ba44bf39533\") " pod="openstack/horizon-766d597b85-nkvzq"
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.062099 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bae2c0c5-184c-4c07-890e-2ba44bf39533-scripts\") pod \"horizon-766d597b85-nkvzq\" (UID: \"bae2c0c5-184c-4c07-890e-2ba44bf39533\") " pod="openstack/horizon-766d597b85-nkvzq"
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.062252 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dbac4a04-13e4-43e5-a221-3faec9e9fac7-run-httpd\") pod \"ceilometer-0\" (UID: \"dbac4a04-13e4-43e5-a221-3faec9e9fac7\") " pod="openstack/ceilometer-0"
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.065158 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/bae2c0c5-184c-4c07-890e-2ba44bf39533-config-data\") pod \"horizon-766d597b85-nkvzq\" (UID: \"bae2c0c5-184c-4c07-890e-2ba44bf39533\") " pod="openstack/horizon-766d597b85-nkvzq"
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.065495 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b59078b7-77d3-42a5-8ce6-9ef6107377b5-config\") pod \"dnsmasq-dns-58dd9ff6bc-2tx9v\" (UID: \"b59078b7-77d3-42a5-8ce6-9ef6107377b5\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-2tx9v"
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.073639 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/bae2c0c5-184c-4c07-890e-2ba44bf39533-horizon-secret-key\") pod \"horizon-766d597b85-nkvzq\" (UID: \"bae2c0c5-184c-4c07-890e-2ba44bf39533\") " pod="openstack/horizon-766d597b85-nkvzq"
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.080048 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dbac4a04-13e4-43e5-a221-3faec9e9fac7-log-httpd\") pod \"ceilometer-0\" (UID: \"dbac4a04-13e4-43e5-a221-3faec9e9fac7\") " pod="openstack/ceilometer-0"
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.082758 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dbac4a04-13e4-43e5-a221-3faec9e9fac7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"dbac4a04-13e4-43e5-a221-3faec9e9fac7\") " pod="openstack/ceilometer-0"
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.083823 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dbac4a04-13e4-43e5-a221-3faec9e9fac7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"dbac4a04-13e4-43e5-a221-3faec9e9fac7\") " pod="openstack/ceilometer-0"
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.084345 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dbac4a04-13e4-43e5-a221-3faec9e9fac7-config-data\") pod \"ceilometer-0\" (UID: \"dbac4a04-13e4-43e5-a221-3faec9e9fac7\") " pod="openstack/ceilometer-0"
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.091341 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v5nvc\" (UniqueName: \"kubernetes.io/projected/dbac4a04-13e4-43e5-a221-3faec9e9fac7-kube-api-access-v5nvc\") pod \"ceilometer-0\" (UID: \"dbac4a04-13e4-43e5-a221-3faec9e9fac7\") " pod="openstack/ceilometer-0"
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.092532 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mmqct\" (UniqueName: \"kubernetes.io/projected/b59078b7-77d3-42a5-8ce6-9ef6107377b5-kube-api-access-mmqct\") pod \"dnsmasq-dns-58dd9ff6bc-2tx9v\" (UID: \"b59078b7-77d3-42a5-8ce6-9ef6107377b5\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-2tx9v"
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.097447 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qtkjw\" (UniqueName: \"kubernetes.io/projected/bae2c0c5-184c-4c07-890e-2ba44bf39533-kube-api-access-qtkjw\") pod \"horizon-766d597b85-nkvzq\" (UID: \"bae2c0c5-184c-4c07-890e-2ba44bf39533\") " pod="openstack/horizon-766d597b85-nkvzq"
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.149142 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/198c5a44-d3b2-4afd-b034-d898309e0f42-scripts\") pod \"placement-db-sync-sg5mr\" (UID: \"198c5a44-d3b2-4afd-b034-d898309e0f42\") " pod="openstack/placement-db-sync-sg5mr"
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.149210 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w59rs\" (UniqueName: \"kubernetes.io/projected/198c5a44-d3b2-4afd-b034-d898309e0f42-kube-api-access-w59rs\") pod \"placement-db-sync-sg5mr\" (UID: \"198c5a44-d3b2-4afd-b034-d898309e0f42\") " pod="openstack/placement-db-sync-sg5mr"
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.149254 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/198c5a44-d3b2-4afd-b034-d898309e0f42-logs\") pod \"placement-db-sync-sg5mr\" (UID: \"198c5a44-d3b2-4afd-b034-d898309e0f42\") " pod="openstack/placement-db-sync-sg5mr"
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.149298 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/198c5a44-d3b2-4afd-b034-d898309e0f42-combined-ca-bundle\") pod \"placement-db-sync-sg5mr\" (UID: \"198c5a44-d3b2-4afd-b034-d898309e0f42\") " pod="openstack/placement-db-sync-sg5mr"
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.149318 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/198c5a44-d3b2-4afd-b034-d898309e0f42-config-data\") pod \"placement-db-sync-sg5mr\" (UID: \"198c5a44-d3b2-4afd-b034-d898309e0f42\") " pod="openstack/placement-db-sync-sg5mr"
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.151335 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-766d597b85-nkvzq"
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.154151 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/198c5a44-d3b2-4afd-b034-d898309e0f42-logs\") pod \"placement-db-sync-sg5mr\" (UID: \"198c5a44-d3b2-4afd-b034-d898309e0f42\") " pod="openstack/placement-db-sync-sg5mr"
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.158032 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/198c5a44-d3b2-4afd-b034-d898309e0f42-combined-ca-bundle\") pod \"placement-db-sync-sg5mr\" (UID: \"198c5a44-d3b2-4afd-b034-d898309e0f42\") " pod="openstack/placement-db-sync-sg5mr"
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.160848 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/198c5a44-d3b2-4afd-b034-d898309e0f42-config-data\") pod \"placement-db-sync-sg5mr\" (UID: \"198c5a44-d3b2-4afd-b034-d898309e0f42\") " pod="openstack/placement-db-sync-sg5mr"
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.168156 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.170277 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/198c5a44-d3b2-4afd-b034-d898309e0f42-scripts\") pod \"placement-db-sync-sg5mr\" (UID: \"198c5a44-d3b2-4afd-b034-d898309e0f42\") " pod="openstack/placement-db-sync-sg5mr"
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.174279 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w59rs\" (UniqueName: \"kubernetes.io/projected/198c5a44-d3b2-4afd-b034-d898309e0f42-kube-api-access-w59rs\") pod \"placement-db-sync-sg5mr\" (UID: \"198c5a44-d3b2-4afd-b034-d898309e0f42\") " pod="openstack/placement-db-sync-sg5mr"
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.225903 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-dlvvg"]
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.254392 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5959f8865f-7tpcz"]
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.281366 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-58dd9ff6bc-2tx9v"
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.312261 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-sg5mr"
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.446455 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-dtppr"]
Dec 05 11:29:17 crc kubenswrapper[4728]: W1205 11:29:17.530083 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod875f6746_18ef_483c_bbb4_80d7dbe4b1a1.slice/crio-7a2a979f8db4594fe9e1ee92c5f841dbb1c6198152fa77f8d12a79d75aa9cd5c WatchSource:0}: Error finding container 7a2a979f8db4594fe9e1ee92c5f841dbb1c6198152fa77f8d12a79d75aa9cd5c: Status 404 returned error can't find the container with id 7a2a979f8db4594fe9e1ee92c5f841dbb1c6198152fa77f8d12a79d75aa9cd5c
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.621838 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-8fpw9"
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.769137 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30ec6fee-e0fe-471a-a673-7856319a8dd8-combined-ca-bundle\") pod \"30ec6fee-e0fe-471a-a673-7856319a8dd8\" (UID: \"30ec6fee-e0fe-471a-a673-7856319a8dd8\") "
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.769632 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fzzfz\" (UniqueName: \"kubernetes.io/projected/30ec6fee-e0fe-471a-a673-7856319a8dd8-kube-api-access-fzzfz\") pod \"30ec6fee-e0fe-471a-a673-7856319a8dd8\" (UID: \"30ec6fee-e0fe-471a-a673-7856319a8dd8\") "
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.769669 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30ec6fee-e0fe-471a-a673-7856319a8dd8-config-data\") pod \"30ec6fee-e0fe-471a-a673-7856319a8dd8\" (UID: \"30ec6fee-e0fe-471a-a673-7856319a8dd8\") "
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.769704 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/30ec6fee-e0fe-471a-a673-7856319a8dd8-db-sync-config-data\") pod \"30ec6fee-e0fe-471a-a673-7856319a8dd8\" (UID: \"30ec6fee-e0fe-471a-a673-7856319a8dd8\") "
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.774371 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30ec6fee-e0fe-471a-a673-7856319a8dd8-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "30ec6fee-e0fe-471a-a673-7856319a8dd8" (UID: "30ec6fee-e0fe-471a-a673-7856319a8dd8"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.777089 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30ec6fee-e0fe-471a-a673-7856319a8dd8-kube-api-access-fzzfz" (OuterVolumeSpecName: "kube-api-access-fzzfz") pod "30ec6fee-e0fe-471a-a673-7856319a8dd8" (UID: "30ec6fee-e0fe-471a-a673-7856319a8dd8"). InnerVolumeSpecName "kube-api-access-fzzfz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.814989 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30ec6fee-e0fe-471a-a673-7856319a8dd8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "30ec6fee-e0fe-471a-a673-7856319a8dd8" (UID: "30ec6fee-e0fe-471a-a673-7856319a8dd8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.863107 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30ec6fee-e0fe-471a-a673-7856319a8dd8-config-data" (OuterVolumeSpecName: "config-data") pod "30ec6fee-e0fe-471a-a673-7856319a8dd8" (UID: "30ec6fee-e0fe-471a-a673-7856319a8dd8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.871951 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fzzfz\" (UniqueName: \"kubernetes.io/projected/30ec6fee-e0fe-471a-a673-7856319a8dd8-kube-api-access-fzzfz\") on node \"crc\" DevicePath \"\""
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.871991 4728 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30ec6fee-e0fe-471a-a673-7856319a8dd8-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.872003 4728 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/30ec6fee-e0fe-471a-a673-7856319a8dd8-db-sync-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.872014 4728 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30ec6fee-e0fe-471a-a673-7856319a8dd8-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.913124 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-dlvvg" event={"ID":"3b80572c-d607-43b8-85da-02ae5a6ae057","Type":"ContainerStarted","Data":"8d00f641b64e0dc77217bfa3f3b6e0e64613764b19e41a5bb7bf5bc57fefb456"}
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.913163 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-dlvvg" event={"ID":"3b80572c-d607-43b8-85da-02ae5a6ae057","Type":"ContainerStarted","Data":"90b851b6108926ce9bcf8417dcfb1e157ce459fb2d006f9665a5357f8e99dc65"}
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.917270 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-j552g"]
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.921782 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-dtppr" event={"ID":"875f6746-18ef-483c-bbb4-80d7dbe4b1a1","Type":"ContainerStarted","Data":"7a2a979f8db4594fe9e1ee92c5f841dbb1c6198152fa77f8d12a79d75aa9cd5c"}
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.933343 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-8fpw9" event={"ID":"30ec6fee-e0fe-471a-a673-7856319a8dd8","Type":"ContainerDied","Data":"ca7cfe6f02286d1e94c5e08b838a22fe050979f2de963deed628fc9ad1b66bbd"}
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.933387 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ca7cfe6f02286d1e94c5e08b838a22fe050979f2de963deed628fc9ad1b66bbd"
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.934024 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-8fpw9"
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.962130 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5959f8865f-7tpcz" event={"ID":"1a448999-fcb4-41e2-bad8-991c8dad1561","Type":"ContainerStarted","Data":"96825b9a1d75e848710361658f87c4e7689f5ab3fa5cb0c9e68637bf67e6e7d6"}
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.962125 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-764c5664d7-p5mjl" podUID="2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d" containerName="dnsmasq-dns" containerID="cri-o://d2452d93920cec8ea6055fc763ad30008c00d7effbc77e049a164083719ced6d" gracePeriod=10
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.962824 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5959f8865f-7tpcz" podUID="1a448999-fcb4-41e2-bad8-991c8dad1561" containerName="init" containerID="cri-o://d3092af7886d27f5e79766a6e8135ec4b6011383c7fcf6ac6b67c2ed8d32bd17" gracePeriod=10
Dec 05 11:29:17 crc kubenswrapper[4728]: I1205 11:29:17.982517 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-fm2df"]
Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.046269 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-766d597b85-nkvzq"]
Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.080780 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-847bd6b965-vxxz2"]
Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.087229 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.094065 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-dlvvg" podStartSLOduration=3.094046666 podStartE2EDuration="3.094046666s" podCreationTimestamp="2025-12-05 11:29:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:29:17.950033121 +0000 UTC m=+1292.092155814" watchObservedRunningTime="2025-12-05 11:29:18.094046666 +0000 UTC m=+1292.236169359"
Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.110811 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-58dd9ff6bc-2tx9v"]
Dec 05 11:29:18 crc kubenswrapper[4728]: W1205 11:29:18.128843 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod198c5a44_d3b2_4afd_b034_d898309e0f42.slice/crio-0e883e48d12b65b602e62c339ebf292ddeaa02aa6bd722d4c046fd94731cacd3 WatchSource:0}: Error finding container 0e883e48d12b65b602e62c339ebf292ddeaa02aa6bd722d4c046fd94731cacd3: Status 404 returned error can't find the container with id 0e883e48d12b65b602e62c339ebf292ddeaa02aa6bd722d4c046fd94731cacd3
Dec 05 11:29:18 crc kubenswrapper[4728]: W1205 11:29:18.129654 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb59078b7_77d3_42a5_8ce6_9ef6107377b5.slice/crio-60357595d70b30fd0d45ed94efcd4c189d9e7432a866bdab690b892a88a57790 WatchSource:0}: Error finding container 60357595d70b30fd0d45ed94efcd4c189d9e7432a866bdab690b892a88a57790: Status 404 returned error can't find the container with id 60357595d70b30fd0d45ed94efcd4c189d9e7432a866bdab690b892a88a57790
Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.158211 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-sg5mr"]
Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.162132 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-sync-zbbv6"]
Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.331091 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-58dd9ff6bc-2tx9v"]
Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.341847 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-snxbb"]
Dec 05 11:29:18 crc kubenswrapper[4728]: E1205 11:29:18.342242 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30ec6fee-e0fe-471a-a673-7856319a8dd8" containerName="glance-db-sync"
Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.342261 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="30ec6fee-e0fe-471a-a673-7856319a8dd8" containerName="glance-db-sync"
Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.342430 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="30ec6fee-e0fe-471a-a673-7856319a8dd8" containerName="glance-db-sync"
Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.348945 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-snxbb"
Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.404872 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-snxbb"]
Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.491294 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1-ovsdbserver-sb\") pod \"dnsmasq-dns-785d8bcb8c-snxbb\" (UID: \"5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1\") " pod="openstack/dnsmasq-dns-785d8bcb8c-snxbb"
Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.491704 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1-dns-svc\") pod \"dnsmasq-dns-785d8bcb8c-snxbb\" (UID: \"5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1\") " pod="openstack/dnsmasq-dns-785d8bcb8c-snxbb"
Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.491774 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1-config\") pod \"dnsmasq-dns-785d8bcb8c-snxbb\" (UID: \"5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1\") " pod="openstack/dnsmasq-dns-785d8bcb8c-snxbb"
Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.491850 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1-dns-swift-storage-0\") pod \"dnsmasq-dns-785d8bcb8c-snxbb\" (UID: \"5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1\") " pod="openstack/dnsmasq-dns-785d8bcb8c-snxbb"
Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.491897 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1-ovsdbserver-nb\") pod \"dnsmasq-dns-785d8bcb8c-snxbb\" (UID: \"5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1\") " pod="openstack/dnsmasq-dns-785d8bcb8c-snxbb"
Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.491988 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zxbbm\" (UniqueName: \"kubernetes.io/projected/5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1-kube-api-access-zxbbm\") pod \"dnsmasq-dns-785d8bcb8c-snxbb\" (UID: \"5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1\") " pod="openstack/dnsmasq-dns-785d8bcb8c-snxbb"
Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.594953 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zxbbm\" (UniqueName: \"kubernetes.io/projected/5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1-kube-api-access-zxbbm\") pod \"dnsmasq-dns-785d8bcb8c-snxbb\" (UID: \"5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1\") " pod="openstack/dnsmasq-dns-785d8bcb8c-snxbb"
Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.595099 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1-ovsdbserver-sb\") pod \"dnsmasq-dns-785d8bcb8c-snxbb\" (UID: \"5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1\") " pod="openstack/dnsmasq-dns-785d8bcb8c-snxbb"
Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.595161 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1-dns-svc\") pod \"dnsmasq-dns-785d8bcb8c-snxbb\" (UID: \"5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1\") " pod="openstack/dnsmasq-dns-785d8bcb8c-snxbb"
Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.595218 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1-config\") pod \"dnsmasq-dns-785d8bcb8c-snxbb\" (UID: \"5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1\") " pod="openstack/dnsmasq-dns-785d8bcb8c-snxbb"
Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.595273 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1-dns-swift-storage-0\") pod \"dnsmasq-dns-785d8bcb8c-snxbb\" (UID: \"5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1\") " pod="openstack/dnsmasq-dns-785d8bcb8c-snxbb"
Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.595310 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1-ovsdbserver-nb\") pod \"dnsmasq-dns-785d8bcb8c-snxbb\" (UID: \"5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1\") " pod="openstack/dnsmasq-dns-785d8bcb8c-snxbb"
Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.596604 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1-ovsdbserver-nb\") pod \"dnsmasq-dns-785d8bcb8c-snxbb\" (UID: \"5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1\") " pod="openstack/dnsmasq-dns-785d8bcb8c-snxbb"
Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.596734 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName:
\"kubernetes.io/configmap/5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1-dns-svc\") pod \"dnsmasq-dns-785d8bcb8c-snxbb\" (UID: \"5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1\") " pod="openstack/dnsmasq-dns-785d8bcb8c-snxbb" Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.597623 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1-ovsdbserver-sb\") pod \"dnsmasq-dns-785d8bcb8c-snxbb\" (UID: \"5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1\") " pod="openstack/dnsmasq-dns-785d8bcb8c-snxbb" Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.598265 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1-dns-swift-storage-0\") pod \"dnsmasq-dns-785d8bcb8c-snxbb\" (UID: \"5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1\") " pod="openstack/dnsmasq-dns-785d8bcb8c-snxbb" Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.599552 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1-config\") pod \"dnsmasq-dns-785d8bcb8c-snxbb\" (UID: \"5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1\") " pod="openstack/dnsmasq-dns-785d8bcb8c-snxbb" Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.622298 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zxbbm\" (UniqueName: \"kubernetes.io/projected/5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1-kube-api-access-zxbbm\") pod \"dnsmasq-dns-785d8bcb8c-snxbb\" (UID: \"5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1\") " pod="openstack/dnsmasq-dns-785d8bcb8c-snxbb" Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.685995 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-764c5664d7-p5mjl" Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.708585 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-snxbb" Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.801138 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d-dns-svc\") pod \"2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d\" (UID: \"2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d\") " Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.801309 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d-ovsdbserver-sb\") pod \"2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d\" (UID: \"2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d\") " Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.801370 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d-ovsdbserver-nb\") pod \"2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d\" (UID: \"2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d\") " Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.801388 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6tv27\" (UniqueName: \"kubernetes.io/projected/2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d-kube-api-access-6tv27\") pod \"2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d\" (UID: \"2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d\") " Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.801559 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d-dns-swift-storage-0\") pod \"2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d\" (UID: \"2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d\") " Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.801598 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d-config\") pod \"2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d\" (UID: \"2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d\") " Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.802489 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5959f8865f-7tpcz" Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.825080 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d-kube-api-access-6tv27" (OuterVolumeSpecName: "kube-api-access-6tv27") pod "2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d" (UID: "2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d"). InnerVolumeSpecName "kube-api-access-6tv27". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.855293 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d" (UID: "2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.859173 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d-config" (OuterVolumeSpecName: "config") pod "2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d" (UID: "2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.875334 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d" (UID: "2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.902650 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1a448999-fcb4-41e2-bad8-991c8dad1561-dns-swift-storage-0\") pod \"1a448999-fcb4-41e2-bad8-991c8dad1561\" (UID: \"1a448999-fcb4-41e2-bad8-991c8dad1561\") " Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.902829 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a448999-fcb4-41e2-bad8-991c8dad1561-dns-svc\") pod \"1a448999-fcb4-41e2-bad8-991c8dad1561\" (UID: \"1a448999-fcb4-41e2-bad8-991c8dad1561\") " Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.902923 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1a448999-fcb4-41e2-bad8-991c8dad1561-ovsdbserver-sb\") pod \"1a448999-fcb4-41e2-bad8-991c8dad1561\" (UID: \"1a448999-fcb4-41e2-bad8-991c8dad1561\") " Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.902959 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hjm4z\" (UniqueName: \"kubernetes.io/projected/1a448999-fcb4-41e2-bad8-991c8dad1561-kube-api-access-hjm4z\") pod \"1a448999-fcb4-41e2-bad8-991c8dad1561\" (UID: \"1a448999-fcb4-41e2-bad8-991c8dad1561\") " Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.902987 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a448999-fcb4-41e2-bad8-991c8dad1561-config\") pod \"1a448999-fcb4-41e2-bad8-991c8dad1561\" (UID: \"1a448999-fcb4-41e2-bad8-991c8dad1561\") " Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.903009 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1a448999-fcb4-41e2-bad8-991c8dad1561-ovsdbserver-nb\") pod \"1a448999-fcb4-41e2-bad8-991c8dad1561\" (UID: \"1a448999-fcb4-41e2-bad8-991c8dad1561\") " Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.903328 4728 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.903339 4728 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d-ovsdbserver-nb\") on node \"crc\" 
DevicePath \"\"" Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.903349 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6tv27\" (UniqueName: \"kubernetes.io/projected/2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d-kube-api-access-6tv27\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.903357 4728 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.912468 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d" (UID: "2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.917359 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a448999-fcb4-41e2-bad8-991c8dad1561-kube-api-access-hjm4z" (OuterVolumeSpecName: "kube-api-access-hjm4z") pod "1a448999-fcb4-41e2-bad8-991c8dad1561" (UID: "1a448999-fcb4-41e2-bad8-991c8dad1561"). InnerVolumeSpecName "kube-api-access-hjm4z". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.918515 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d" (UID: "2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.937581 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a448999-fcb4-41e2-bad8-991c8dad1561-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "1a448999-fcb4-41e2-bad8-991c8dad1561" (UID: "1a448999-fcb4-41e2-bad8-991c8dad1561"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.944391 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a448999-fcb4-41e2-bad8-991c8dad1561-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "1a448999-fcb4-41e2-bad8-991c8dad1561" (UID: "1a448999-fcb4-41e2-bad8-991c8dad1561"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.947873 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a448999-fcb4-41e2-bad8-991c8dad1561-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "1a448999-fcb4-41e2-bad8-991c8dad1561" (UID: "1a448999-fcb4-41e2-bad8-991c8dad1561"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.950455 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a448999-fcb4-41e2-bad8-991c8dad1561-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "1a448999-fcb4-41e2-bad8-991c8dad1561" (UID: "1a448999-fcb4-41e2-bad8-991c8dad1561"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.956304 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a448999-fcb4-41e2-bad8-991c8dad1561-config" (OuterVolumeSpecName: "config") pod "1a448999-fcb4-41e2-bad8-991c8dad1561" (UID: "1a448999-fcb4-41e2-bad8-991c8dad1561"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.981141 4728 generic.go:334] "Generic (PLEG): container finished" podID="b59078b7-77d3-42a5-8ce6-9ef6107377b5" containerID="61e54474e2db2dbe4b0322cefc4869406aacf1895b80aa44442413ae6b491e3f" exitCode=0 Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.981342 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58dd9ff6bc-2tx9v" event={"ID":"b59078b7-77d3-42a5-8ce6-9ef6107377b5","Type":"ContainerDied","Data":"61e54474e2db2dbe4b0322cefc4869406aacf1895b80aa44442413ae6b491e3f"} Dec 05 11:29:18 crc kubenswrapper[4728]: I1205 11:29:18.981368 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58dd9ff6bc-2tx9v" event={"ID":"b59078b7-77d3-42a5-8ce6-9ef6107377b5","Type":"ContainerStarted","Data":"60357595d70b30fd0d45ed94efcd4c189d9e7432a866bdab690b892a88a57790"} Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.000752 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dbac4a04-13e4-43e5-a221-3faec9e9fac7","Type":"ContainerStarted","Data":"3d7c688c4097698417f3ffe9cdd94dc9c7e500c58100bb28e5443023bdf0f232"} Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.001692 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-fm2df" event={"ID":"555e531f-162f-4097-ba36-53b6ddedd6d8","Type":"ContainerStarted","Data":"6ceb4b0f4ba9e2a2ac68ef8bd37e7619389cc8d5de9c516c436892269fa4c530"} Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.004667 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hjm4z\" (UniqueName: \"kubernetes.io/projected/1a448999-fcb4-41e2-bad8-991c8dad1561-kube-api-access-hjm4z\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.004690 4728 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a448999-fcb4-41e2-bad8-991c8dad1561-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.004700 4728 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1a448999-fcb4-41e2-bad8-991c8dad1561-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.004709 4728 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1a448999-fcb4-41e2-bad8-991c8dad1561-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.004720 4728 
reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.004741 4728 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a448999-fcb4-41e2-bad8-991c8dad1561-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.004752 4728 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.004759 4728 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1a448999-fcb4-41e2-bad8-991c8dad1561-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.012229 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-847bd6b965-vxxz2" event={"ID":"55ead1aa-62ac-4873-9d68-d409a2823f8c","Type":"ContainerStarted","Data":"bb173083beb298112343bb179410eab5131657cba009c1f1b448381b90aeffc7"} Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.040311 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-766d597b85-nkvzq" event={"ID":"bae2c0c5-184c-4c07-890e-2ba44bf39533","Type":"ContainerStarted","Data":"cdb86fb9b05eee2c1b69edb904c403537549a9612d19d21c0ffe047503c22759"} Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.044253 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-sg5mr" event={"ID":"198c5a44-d3b2-4afd-b034-d898309e0f42","Type":"ContainerStarted","Data":"0e883e48d12b65b602e62c339ebf292ddeaa02aa6bd722d4c046fd94731cacd3"} Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.048580 4728 generic.go:334] "Generic (PLEG): container finished" podID="2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d" containerID="d2452d93920cec8ea6055fc763ad30008c00d7effbc77e049a164083719ced6d" exitCode=0 Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.048644 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-764c5664d7-p5mjl" event={"ID":"2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d","Type":"ContainerDied","Data":"d2452d93920cec8ea6055fc763ad30008c00d7effbc77e049a164083719ced6d"} Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.048664 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-764c5664d7-p5mjl" event={"ID":"2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d","Type":"ContainerDied","Data":"d2c1a33b5adf2186bc5d493e44a708309a482c950cf693e0eac7079ba7a493b9"} Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.048680 4728 scope.go:117] "RemoveContainer" containerID="d2452d93920cec8ea6055fc763ad30008c00d7effbc77e049a164083719ced6d" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.048779 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-764c5664d7-p5mjl" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.064948 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-zbbv6" event={"ID":"537c7276-c2c9-4427-9b2b-5e835e3bc2d7","Type":"ContainerStarted","Data":"db6d68d8ddfc5b058bd6f7baf5a35db447bb253ee5fa3818568176ef8eb0b1bc"} Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.080998 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-j552g" event={"ID":"9e792568-52ab-4080-bd08-8d6ef2f15ee7","Type":"ContainerStarted","Data":"266530df784a322eb87fc13e1fd3e4001c9e02cccbe288ba7aaf883c9968a82c"} Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.081039 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-j552g" event={"ID":"9e792568-52ab-4080-bd08-8d6ef2f15ee7","Type":"ContainerStarted","Data":"6f6bab616e0a23e5cd84cd8bb0dae54636ab9b44a626fc8159d63f858cca16ad"} Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.088726 4728 generic.go:334] "Generic (PLEG): container finished" podID="1a448999-fcb4-41e2-bad8-991c8dad1561" containerID="d3092af7886d27f5e79766a6e8135ec4b6011383c7fcf6ac6b67c2ed8d32bd17" exitCode=0 Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.088884 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5959f8865f-7tpcz" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.088925 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5959f8865f-7tpcz" event={"ID":"1a448999-fcb4-41e2-bad8-991c8dad1561","Type":"ContainerDied","Data":"d3092af7886d27f5e79766a6e8135ec4b6011383c7fcf6ac6b67c2ed8d32bd17"} Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.088981 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5959f8865f-7tpcz" event={"ID":"1a448999-fcb4-41e2-bad8-991c8dad1561","Type":"ContainerDied","Data":"96825b9a1d75e848710361658f87c4e7689f5ab3fa5cb0c9e68637bf67e6e7d6"} Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.104267 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-p5mjl"] Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.113639 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-p5mjl"] Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.121473 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-j552g" podStartSLOduration=3.121454499 podStartE2EDuration="3.121454499s" podCreationTimestamp="2025-12-05 11:29:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:29:19.100007941 +0000 UTC m=+1293.242130634" watchObservedRunningTime="2025-12-05 11:29:19.121454499 +0000 UTC m=+1293.263577192" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.152952 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5959f8865f-7tpcz"] Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.160490 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5959f8865f-7tpcz"] Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.171045 4728 scope.go:117] "RemoveContainer" containerID="ebab175f43921f5c9a2da6346d672521371ec1dbe4362d10d44b380d6793e32e" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.222492 4728 scope.go:117] 
"RemoveContainer" containerID="d2452d93920cec8ea6055fc763ad30008c00d7effbc77e049a164083719ced6d" Dec 05 11:29:19 crc kubenswrapper[4728]: E1205 11:29:19.236286 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d2452d93920cec8ea6055fc763ad30008c00d7effbc77e049a164083719ced6d\": container with ID starting with d2452d93920cec8ea6055fc763ad30008c00d7effbc77e049a164083719ced6d not found: ID does not exist" containerID="d2452d93920cec8ea6055fc763ad30008c00d7effbc77e049a164083719ced6d" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.236493 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d2452d93920cec8ea6055fc763ad30008c00d7effbc77e049a164083719ced6d"} err="failed to get container status \"d2452d93920cec8ea6055fc763ad30008c00d7effbc77e049a164083719ced6d\": rpc error: code = NotFound desc = could not find container \"d2452d93920cec8ea6055fc763ad30008c00d7effbc77e049a164083719ced6d\": container with ID starting with d2452d93920cec8ea6055fc763ad30008c00d7effbc77e049a164083719ced6d not found: ID does not exist" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.236520 4728 scope.go:117] "RemoveContainer" containerID="ebab175f43921f5c9a2da6346d672521371ec1dbe4362d10d44b380d6793e32e" Dec 05 11:29:19 crc kubenswrapper[4728]: E1205 11:29:19.236738 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ebab175f43921f5c9a2da6346d672521371ec1dbe4362d10d44b380d6793e32e\": container with ID starting with ebab175f43921f5c9a2da6346d672521371ec1dbe4362d10d44b380d6793e32e not found: ID does not exist" containerID="ebab175f43921f5c9a2da6346d672521371ec1dbe4362d10d44b380d6793e32e" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.236759 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ebab175f43921f5c9a2da6346d672521371ec1dbe4362d10d44b380d6793e32e"} err="failed to get container status \"ebab175f43921f5c9a2da6346d672521371ec1dbe4362d10d44b380d6793e32e\": rpc error: code = NotFound desc = could not find container \"ebab175f43921f5c9a2da6346d672521371ec1dbe4362d10d44b380d6793e32e\": container with ID starting with ebab175f43921f5c9a2da6346d672521371ec1dbe4362d10d44b380d6793e32e not found: ID does not exist" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.236771 4728 scope.go:117] "RemoveContainer" containerID="d3092af7886d27f5e79766a6e8135ec4b6011383c7fcf6ac6b67c2ed8d32bd17" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.239148 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-snxbb"] Dec 05 11:29:19 crc kubenswrapper[4728]: W1205 11:29:19.265532 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5a8bd9ce_fdd1_4d9e_a9c8_5ecf3b7ba7b1.slice/crio-47a847aa13d9f64fae9288c7af7228c7d178ca8b00602da601fd32e0d5b55c66 WatchSource:0}: Error finding container 47a847aa13d9f64fae9288c7af7228c7d178ca8b00602da601fd32e0d5b55c66: Status 404 returned error can't find the container with id 47a847aa13d9f64fae9288c7af7228c7d178ca8b00602da601fd32e0d5b55c66 Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.329457 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 11:29:19 crc kubenswrapper[4728]: E1205 11:29:19.329811 4728 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d" containerName="init" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.329827 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d" containerName="init" Dec 05 11:29:19 crc kubenswrapper[4728]: E1205 11:29:19.329845 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d" containerName="dnsmasq-dns" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.329852 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d" containerName="dnsmasq-dns" Dec 05 11:29:19 crc kubenswrapper[4728]: E1205 11:29:19.329875 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a448999-fcb4-41e2-bad8-991c8dad1561" containerName="init" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.329881 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a448999-fcb4-41e2-bad8-991c8dad1561" containerName="init" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.330059 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d" containerName="dnsmasq-dns" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.330085 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a448999-fcb4-41e2-bad8-991c8dad1561" containerName="init" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.333318 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.336267 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.337217 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.337498 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.337772 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-4w4f7" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.359544 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.366189 4728 scope.go:117] "RemoveContainer" containerID="d3092af7886d27f5e79766a6e8135ec4b6011383c7fcf6ac6b67c2ed8d32bd17" Dec 05 11:29:19 crc kubenswrapper[4728]: E1205 11:29:19.369354 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d3092af7886d27f5e79766a6e8135ec4b6011383c7fcf6ac6b67c2ed8d32bd17\": container with ID starting with d3092af7886d27f5e79766a6e8135ec4b6011383c7fcf6ac6b67c2ed8d32bd17 not found: ID does not exist" containerID="d3092af7886d27f5e79766a6e8135ec4b6011383c7fcf6ac6b67c2ed8d32bd17" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.369398 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d3092af7886d27f5e79766a6e8135ec4b6011383c7fcf6ac6b67c2ed8d32bd17"} err="failed to get container status \"d3092af7886d27f5e79766a6e8135ec4b6011383c7fcf6ac6b67c2ed8d32bd17\": rpc error: code = NotFound desc = could not find container \"d3092af7886d27f5e79766a6e8135ec4b6011383c7fcf6ac6b67c2ed8d32bd17\": 
container with ID starting with d3092af7886d27f5e79766a6e8135ec4b6011383c7fcf6ac6b67c2ed8d32bd17 not found: ID does not exist" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.431467 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.433091 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.441578 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.444240 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.523367 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/50e74892-1d32-4450-8387-853bfd73cad6-logs\") pod \"glance-default-external-api-0\" (UID: \"50e74892-1d32-4450-8387-853bfd73cad6\") " pod="openstack/glance-default-external-api-0" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.523411 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/50e74892-1d32-4450-8387-853bfd73cad6-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"50e74892-1d32-4450-8387-853bfd73cad6\") " pod="openstack/glance-default-external-api-0" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.523441 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"50e74892-1d32-4450-8387-853bfd73cad6\") " pod="openstack/glance-default-external-api-0" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.523490 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50e74892-1d32-4450-8387-853bfd73cad6-config-data\") pod \"glance-default-external-api-0\" (UID: \"50e74892-1d32-4450-8387-853bfd73cad6\") " pod="openstack/glance-default-external-api-0" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.523524 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/50e74892-1d32-4450-8387-853bfd73cad6-scripts\") pod \"glance-default-external-api-0\" (UID: \"50e74892-1d32-4450-8387-853bfd73cad6\") " pod="openstack/glance-default-external-api-0" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.523562 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-grpw7\" (UniqueName: \"kubernetes.io/projected/50e74892-1d32-4450-8387-853bfd73cad6-kube-api-access-grpw7\") pod \"glance-default-external-api-0\" (UID: \"50e74892-1d32-4450-8387-853bfd73cad6\") " pod="openstack/glance-default-external-api-0" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.523592 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50e74892-1d32-4450-8387-853bfd73cad6-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: 
\"50e74892-1d32-4450-8387-853bfd73cad6\") " pod="openstack/glance-default-external-api-0" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.523623 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/50e74892-1d32-4450-8387-853bfd73cad6-ceph\") pod \"glance-default-external-api-0\" (UID: \"50e74892-1d32-4450-8387-853bfd73cad6\") " pod="openstack/glance-default-external-api-0" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.549291 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 11:29:19 crc kubenswrapper[4728]: E1205 11:29:19.550055 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[ceph combined-ca-bundle config-data glance httpd-run kube-api-access-grpw7 logs scripts], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/glance-default-external-api-0" podUID="50e74892-1d32-4450-8387-853bfd73cad6" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.563816 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-766d597b85-nkvzq"] Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.618420 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-6d7b85cb69-vxpnj"] Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.622406 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6d7b85cb69-vxpnj" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.626961 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50e74892-1d32-4450-8387-853bfd73cad6-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"50e74892-1d32-4450-8387-853bfd73cad6\") " pod="openstack/glance-default-external-api-0" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.638014 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vk85f\" (UniqueName: \"kubernetes.io/projected/679721ff-6a89-4f4c-88b8-bb50f315a086-kube-api-access-vk85f\") pod \"glance-default-internal-api-0\" (UID: \"679721ff-6a89-4f4c-88b8-bb50f315a086\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.638305 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/679721ff-6a89-4f4c-88b8-bb50f315a086-ceph\") pod \"glance-default-internal-api-0\" (UID: \"679721ff-6a89-4f4c-88b8-bb50f315a086\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.646143 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/50e74892-1d32-4450-8387-853bfd73cad6-ceph\") pod \"glance-default-external-api-0\" (UID: \"50e74892-1d32-4450-8387-853bfd73cad6\") " pod="openstack/glance-default-external-api-0" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.646410 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/679721ff-6a89-4f4c-88b8-bb50f315a086-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"679721ff-6a89-4f4c-88b8-bb50f315a086\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:29:19 crc 
kubenswrapper[4728]: I1205 11:29:19.646496 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/679721ff-6a89-4f4c-88b8-bb50f315a086-config-data\") pod \"glance-default-internal-api-0\" (UID: \"679721ff-6a89-4f4c-88b8-bb50f315a086\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.646570 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/679721ff-6a89-4f4c-88b8-bb50f315a086-scripts\") pod \"glance-default-internal-api-0\" (UID: \"679721ff-6a89-4f4c-88b8-bb50f315a086\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.646639 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/50e74892-1d32-4450-8387-853bfd73cad6-logs\") pod \"glance-default-external-api-0\" (UID: \"50e74892-1d32-4450-8387-853bfd73cad6\") " pod="openstack/glance-default-external-api-0" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.646718 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/50e74892-1d32-4450-8387-853bfd73cad6-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"50e74892-1d32-4450-8387-853bfd73cad6\") " pod="openstack/glance-default-external-api-0" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.646821 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"679721ff-6a89-4f4c-88b8-bb50f315a086\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.646900 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"50e74892-1d32-4450-8387-853bfd73cad6\") " pod="openstack/glance-default-external-api-0" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.647063 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/679721ff-6a89-4f4c-88b8-bb50f315a086-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"679721ff-6a89-4f4c-88b8-bb50f315a086\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.647172 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50e74892-1d32-4450-8387-853bfd73cad6-config-data\") pod \"glance-default-external-api-0\" (UID: \"50e74892-1d32-4450-8387-853bfd73cad6\") " pod="openstack/glance-default-external-api-0" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.647270 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/50e74892-1d32-4450-8387-853bfd73cad6-scripts\") pod \"glance-default-external-api-0\" (UID: \"50e74892-1d32-4450-8387-853bfd73cad6\") " pod="openstack/glance-default-external-api-0" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.647385 4728 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-grpw7\" (UniqueName: \"kubernetes.io/projected/50e74892-1d32-4450-8387-853bfd73cad6-kube-api-access-grpw7\") pod \"glance-default-external-api-0\" (UID: \"50e74892-1d32-4450-8387-853bfd73cad6\") " pod="openstack/glance-default-external-api-0" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.647462 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/679721ff-6a89-4f4c-88b8-bb50f315a086-logs\") pod \"glance-default-internal-api-0\" (UID: \"679721ff-6a89-4f4c-88b8-bb50f315a086\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.651321 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/50e74892-1d32-4450-8387-853bfd73cad6-logs\") pod \"glance-default-external-api-0\" (UID: \"50e74892-1d32-4450-8387-853bfd73cad6\") " pod="openstack/glance-default-external-api-0" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.651658 4728 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"50e74892-1d32-4450-8387-853bfd73cad6\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-external-api-0" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.656229 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/50e74892-1d32-4450-8387-853bfd73cad6-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"50e74892-1d32-4450-8387-853bfd73cad6\") " pod="openstack/glance-default-external-api-0" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.657991 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/50e74892-1d32-4450-8387-853bfd73cad6-ceph\") pod \"glance-default-external-api-0\" (UID: \"50e74892-1d32-4450-8387-853bfd73cad6\") " pod="openstack/glance-default-external-api-0" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.660353 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50e74892-1d32-4450-8387-853bfd73cad6-config-data\") pod \"glance-default-external-api-0\" (UID: \"50e74892-1d32-4450-8387-853bfd73cad6\") " pod="openstack/glance-default-external-api-0" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.661247 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6d7b85cb69-vxpnj"] Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.669919 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/50e74892-1d32-4450-8387-853bfd73cad6-scripts\") pod \"glance-default-external-api-0\" (UID: \"50e74892-1d32-4450-8387-853bfd73cad6\") " pod="openstack/glance-default-external-api-0" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.688309 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50e74892-1d32-4450-8387-853bfd73cad6-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"50e74892-1d32-4450-8387-853bfd73cad6\") " pod="openstack/glance-default-external-api-0" Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 
11:29:19.722523 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-58dd9ff6bc-2tx9v"
Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.728146 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.732043 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-grpw7\" (UniqueName: \"kubernetes.io/projected/50e74892-1d32-4450-8387-853bfd73cad6-kube-api-access-grpw7\") pod \"glance-default-external-api-0\" (UID: \"50e74892-1d32-4450-8387-853bfd73cad6\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.747616 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"50e74892-1d32-4450-8387-853bfd73cad6\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.750059 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/679721ff-6a89-4f4c-88b8-bb50f315a086-config-data\") pod \"glance-default-internal-api-0\" (UID: \"679721ff-6a89-4f4c-88b8-bb50f315a086\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.750095 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/679721ff-6a89-4f4c-88b8-bb50f315a086-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"679721ff-6a89-4f4c-88b8-bb50f315a086\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.750329 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/679721ff-6a89-4f4c-88b8-bb50f315a086-scripts\") pod \"glance-default-internal-api-0\" (UID: \"679721ff-6a89-4f4c-88b8-bb50f315a086\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.750577 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/eeb8f353-d0fc-4195-82c3-bff916a2ca01-scripts\") pod \"horizon-6d7b85cb69-vxpnj\" (UID: \"eeb8f353-d0fc-4195-82c3-bff916a2ca01\") " pod="openstack/horizon-6d7b85cb69-vxpnj"
Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.750966 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"679721ff-6a89-4f4c-88b8-bb50f315a086\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.751058 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/679721ff-6a89-4f4c-88b8-bb50f315a086-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"679721ff-6a89-4f4c-88b8-bb50f315a086\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.750461 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/679721ff-6a89-4f4c-88b8-bb50f315a086-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"679721ff-6a89-4f4c-88b8-bb50f315a086\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.751352 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eeb8f353-d0fc-4195-82c3-bff916a2ca01-logs\") pod \"horizon-6d7b85cb69-vxpnj\" (UID: \"eeb8f353-d0fc-4195-82c3-bff916a2ca01\") " pod="openstack/horizon-6d7b85cb69-vxpnj"
Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.751449 4728 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"679721ff-6a89-4f4c-88b8-bb50f315a086\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/glance-default-internal-api-0"
Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.752893 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/eeb8f353-d0fc-4195-82c3-bff916a2ca01-horizon-secret-key\") pod \"horizon-6d7b85cb69-vxpnj\" (UID: \"eeb8f353-d0fc-4195-82c3-bff916a2ca01\") " pod="openstack/horizon-6d7b85cb69-vxpnj"
Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.753497 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/679721ff-6a89-4f4c-88b8-bb50f315a086-logs\") pod \"glance-default-internal-api-0\" (UID: \"679721ff-6a89-4f4c-88b8-bb50f315a086\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.753571 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/eeb8f353-d0fc-4195-82c3-bff916a2ca01-config-data\") pod \"horizon-6d7b85cb69-vxpnj\" (UID: \"eeb8f353-d0fc-4195-82c3-bff916a2ca01\") " pod="openstack/horizon-6d7b85cb69-vxpnj"
Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.753663 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vk85f\" (UniqueName: \"kubernetes.io/projected/679721ff-6a89-4f4c-88b8-bb50f315a086-kube-api-access-vk85f\") pod \"glance-default-internal-api-0\" (UID: \"679721ff-6a89-4f4c-88b8-bb50f315a086\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.753703 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/679721ff-6a89-4f4c-88b8-bb50f315a086-ceph\") pod \"glance-default-internal-api-0\" (UID: \"679721ff-6a89-4f4c-88b8-bb50f315a086\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.753773 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8f8jw\" (UniqueName: \"kubernetes.io/projected/eeb8f353-d0fc-4195-82c3-bff916a2ca01-kube-api-access-8f8jw\") pod \"horizon-6d7b85cb69-vxpnj\" (UID: \"eeb8f353-d0fc-4195-82c3-bff916a2ca01\") " pod="openstack/horizon-6d7b85cb69-vxpnj"
Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.754360 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/679721ff-6a89-4f4c-88b8-bb50f315a086-logs\") pod \"glance-default-internal-api-0\" (UID: \"679721ff-6a89-4f4c-88b8-bb50f315a086\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.766471 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/679721ff-6a89-4f4c-88b8-bb50f315a086-config-data\") pod \"glance-default-internal-api-0\" (UID: \"679721ff-6a89-4f4c-88b8-bb50f315a086\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.769621 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/679721ff-6a89-4f4c-88b8-bb50f315a086-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"679721ff-6a89-4f4c-88b8-bb50f315a086\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.770542 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 05 11:29:19 crc kubenswrapper[4728]: E1205 11:29:19.771251 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[ceph glance kube-api-access-vk85f scripts], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/glance-default-internal-api-0" podUID="679721ff-6a89-4f4c-88b8-bb50f315a086"
Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.778203 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/679721ff-6a89-4f4c-88b8-bb50f315a086-ceph\") pod \"glance-default-internal-api-0\" (UID: \"679721ff-6a89-4f4c-88b8-bb50f315a086\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.803049 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vk85f\" (UniqueName: \"kubernetes.io/projected/679721ff-6a89-4f4c-88b8-bb50f315a086-kube-api-access-vk85f\") pod \"glance-default-internal-api-0\" (UID: \"679721ff-6a89-4f4c-88b8-bb50f315a086\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.809668 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/679721ff-6a89-4f4c-88b8-bb50f315a086-scripts\") pod \"glance-default-internal-api-0\" (UID: \"679721ff-6a89-4f4c-88b8-bb50f315a086\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.855321 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b59078b7-77d3-42a5-8ce6-9ef6107377b5-ovsdbserver-nb\") pod \"b59078b7-77d3-42a5-8ce6-9ef6107377b5\" (UID: \"b59078b7-77d3-42a5-8ce6-9ef6107377b5\") "
Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.855364 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b59078b7-77d3-42a5-8ce6-9ef6107377b5-ovsdbserver-sb\") pod \"b59078b7-77d3-42a5-8ce6-9ef6107377b5\" (UID: \"b59078b7-77d3-42a5-8ce6-9ef6107377b5\") "
Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.855436 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b59078b7-77d3-42a5-8ce6-9ef6107377b5-config\") pod \"b59078b7-77d3-42a5-8ce6-9ef6107377b5\" (UID: \"b59078b7-77d3-42a5-8ce6-9ef6107377b5\") "
Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.855546 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b59078b7-77d3-42a5-8ce6-9ef6107377b5-dns-swift-storage-0\") pod \"b59078b7-77d3-42a5-8ce6-9ef6107377b5\" (UID: \"b59078b7-77d3-42a5-8ce6-9ef6107377b5\") "
Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.855601 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b59078b7-77d3-42a5-8ce6-9ef6107377b5-dns-svc\") pod \"b59078b7-77d3-42a5-8ce6-9ef6107377b5\" (UID: \"b59078b7-77d3-42a5-8ce6-9ef6107377b5\") "
Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.855618 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mmqct\" (UniqueName: \"kubernetes.io/projected/b59078b7-77d3-42a5-8ce6-9ef6107377b5-kube-api-access-mmqct\") pod \"b59078b7-77d3-42a5-8ce6-9ef6107377b5\" (UID: \"b59078b7-77d3-42a5-8ce6-9ef6107377b5\") "
Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.855884 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8f8jw\" (UniqueName: \"kubernetes.io/projected/eeb8f353-d0fc-4195-82c3-bff916a2ca01-kube-api-access-8f8jw\") pod \"horizon-6d7b85cb69-vxpnj\" (UID: \"eeb8f353-d0fc-4195-82c3-bff916a2ca01\") " pod="openstack/horizon-6d7b85cb69-vxpnj"
Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.855974 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/eeb8f353-d0fc-4195-82c3-bff916a2ca01-scripts\") pod \"horizon-6d7b85cb69-vxpnj\" (UID: \"eeb8f353-d0fc-4195-82c3-bff916a2ca01\") " pod="openstack/horizon-6d7b85cb69-vxpnj"
Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.856061 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eeb8f353-d0fc-4195-82c3-bff916a2ca01-logs\") pod \"horizon-6d7b85cb69-vxpnj\" (UID: \"eeb8f353-d0fc-4195-82c3-bff916a2ca01\") " pod="openstack/horizon-6d7b85cb69-vxpnj"
Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.856105 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/eeb8f353-d0fc-4195-82c3-bff916a2ca01-horizon-secret-key\") pod \"horizon-6d7b85cb69-vxpnj\" (UID: \"eeb8f353-d0fc-4195-82c3-bff916a2ca01\") " pod="openstack/horizon-6d7b85cb69-vxpnj"
Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.856128 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/eeb8f353-d0fc-4195-82c3-bff916a2ca01-config-data\") pod \"horizon-6d7b85cb69-vxpnj\" (UID: \"eeb8f353-d0fc-4195-82c3-bff916a2ca01\") " pod="openstack/horizon-6d7b85cb69-vxpnj"
Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.860603 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/eeb8f353-d0fc-4195-82c3-bff916a2ca01-scripts\") pod \"horizon-6d7b85cb69-vxpnj\" (UID: \"eeb8f353-d0fc-4195-82c3-bff916a2ca01\") " pod="openstack/horizon-6d7b85cb69-vxpnj"
Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.863851 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b59078b7-77d3-42a5-8ce6-9ef6107377b5-kube-api-access-mmqct" (OuterVolumeSpecName: "kube-api-access-mmqct") pod "b59078b7-77d3-42a5-8ce6-9ef6107377b5" (UID: "b59078b7-77d3-42a5-8ce6-9ef6107377b5"). InnerVolumeSpecName "kube-api-access-mmqct". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.868308 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/eeb8f353-d0fc-4195-82c3-bff916a2ca01-config-data\") pod \"horizon-6d7b85cb69-vxpnj\" (UID: \"eeb8f353-d0fc-4195-82c3-bff916a2ca01\") " pod="openstack/horizon-6d7b85cb69-vxpnj"
Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.868627 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eeb8f353-d0fc-4195-82c3-bff916a2ca01-logs\") pod \"horizon-6d7b85cb69-vxpnj\" (UID: \"eeb8f353-d0fc-4195-82c3-bff916a2ca01\") " pod="openstack/horizon-6d7b85cb69-vxpnj"
Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.902162 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b59078b7-77d3-42a5-8ce6-9ef6107377b5-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "b59078b7-77d3-42a5-8ce6-9ef6107377b5" (UID: "b59078b7-77d3-42a5-8ce6-9ef6107377b5"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.913466 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8f8jw\" (UniqueName: \"kubernetes.io/projected/eeb8f353-d0fc-4195-82c3-bff916a2ca01-kube-api-access-8f8jw\") pod \"horizon-6d7b85cb69-vxpnj\" (UID: \"eeb8f353-d0fc-4195-82c3-bff916a2ca01\") " pod="openstack/horizon-6d7b85cb69-vxpnj"
Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.915269 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"679721ff-6a89-4f4c-88b8-bb50f315a086\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.916124 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b59078b7-77d3-42a5-8ce6-9ef6107377b5-config" (OuterVolumeSpecName: "config") pod "b59078b7-77d3-42a5-8ce6-9ef6107377b5" (UID: "b59078b7-77d3-42a5-8ce6-9ef6107377b5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.920351 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/eeb8f353-d0fc-4195-82c3-bff916a2ca01-horizon-secret-key\") pod \"horizon-6d7b85cb69-vxpnj\" (UID: \"eeb8f353-d0fc-4195-82c3-bff916a2ca01\") " pod="openstack/horizon-6d7b85cb69-vxpnj"
Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.924577 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b59078b7-77d3-42a5-8ce6-9ef6107377b5-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "b59078b7-77d3-42a5-8ce6-9ef6107377b5" (UID: "b59078b7-77d3-42a5-8ce6-9ef6107377b5"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.956038 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b59078b7-77d3-42a5-8ce6-9ef6107377b5-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b59078b7-77d3-42a5-8ce6-9ef6107377b5" (UID: "b59078b7-77d3-42a5-8ce6-9ef6107377b5"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.965687 4728 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b59078b7-77d3-42a5-8ce6-9ef6107377b5-config\") on node \"crc\" DevicePath \"\""
Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.965736 4728 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b59078b7-77d3-42a5-8ce6-9ef6107377b5-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.965751 4728 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b59078b7-77d3-42a5-8ce6-9ef6107377b5-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.965766 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mmqct\" (UniqueName: \"kubernetes.io/projected/b59078b7-77d3-42a5-8ce6-9ef6107377b5-kube-api-access-mmqct\") on node \"crc\" DevicePath \"\""
Dec 05 11:29:19 crc kubenswrapper[4728]: I1205 11:29:19.965778 4728 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b59078b7-77d3-42a5-8ce6-9ef6107377b5-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.001584 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b59078b7-77d3-42a5-8ce6-9ef6107377b5-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "b59078b7-77d3-42a5-8ce6-9ef6107377b5" (UID: "b59078b7-77d3-42a5-8ce6-9ef6107377b5"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.067765 4728 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b59078b7-77d3-42a5-8ce6-9ef6107377b5-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.086255 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6d7b85cb69-vxpnj"
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.129470 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-snxbb" event={"ID":"5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1","Type":"ContainerStarted","Data":"47a847aa13d9f64fae9288c7af7228c7d178ca8b00602da601fd32e0d5b55c66"}
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.134871 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.135596 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-58dd9ff6bc-2tx9v"
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.136540 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.136934 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58dd9ff6bc-2tx9v" event={"ID":"b59078b7-77d3-42a5-8ce6-9ef6107377b5","Type":"ContainerDied","Data":"60357595d70b30fd0d45ed94efcd4c189d9e7432a866bdab690b892a88a57790"}
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.136977 4728 scope.go:117] "RemoveContainer" containerID="61e54474e2db2dbe4b0322cefc4869406aacf1895b80aa44442413ae6b491e3f"
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.162270 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.171888 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.253855 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-58dd9ff6bc-2tx9v"]
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.260275 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-58dd9ff6bc-2tx9v"]
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.270301 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/50e74892-1d32-4450-8387-853bfd73cad6-ceph\") pod \"50e74892-1d32-4450-8387-853bfd73cad6\" (UID: \"50e74892-1d32-4450-8387-853bfd73cad6\") "
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.270370 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/679721ff-6a89-4f4c-88b8-bb50f315a086-logs\") pod \"679721ff-6a89-4f4c-88b8-bb50f315a086\" (UID: \"679721ff-6a89-4f4c-88b8-bb50f315a086\") "
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.270399 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vk85f\" (UniqueName: \"kubernetes.io/projected/679721ff-6a89-4f4c-88b8-bb50f315a086-kube-api-access-vk85f\") pod \"679721ff-6a89-4f4c-88b8-bb50f315a086\" (UID: \"679721ff-6a89-4f4c-88b8-bb50f315a086\") "
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.270421 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/50e74892-1d32-4450-8387-853bfd73cad6-scripts\") pod \"50e74892-1d32-4450-8387-853bfd73cad6\" (UID: \"50e74892-1d32-4450-8387-853bfd73cad6\") "
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.270437 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/679721ff-6a89-4f4c-88b8-bb50f315a086-httpd-run\") pod \"679721ff-6a89-4f4c-88b8-bb50f315a086\" (UID: \"679721ff-6a89-4f4c-88b8-bb50f315a086\") "
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.270451 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/679721ff-6a89-4f4c-88b8-bb50f315a086-ceph\") pod \"679721ff-6a89-4f4c-88b8-bb50f315a086\" (UID: \"679721ff-6a89-4f4c-88b8-bb50f315a086\") "
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.270483 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/679721ff-6a89-4f4c-88b8-bb50f315a086-combined-ca-bundle\") pod \"679721ff-6a89-4f4c-88b8-bb50f315a086\" (UID: \"679721ff-6a89-4f4c-88b8-bb50f315a086\") "
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.270516 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/50e74892-1d32-4450-8387-853bfd73cad6-logs\") pod \"50e74892-1d32-4450-8387-853bfd73cad6\" (UID: \"50e74892-1d32-4450-8387-853bfd73cad6\") "
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.270596 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50e74892-1d32-4450-8387-853bfd73cad6-combined-ca-bundle\") pod \"50e74892-1d32-4450-8387-853bfd73cad6\" (UID: \"50e74892-1d32-4450-8387-853bfd73cad6\") "
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.270620 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"679721ff-6a89-4f4c-88b8-bb50f315a086\" (UID: \"679721ff-6a89-4f4c-88b8-bb50f315a086\") "
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.270654 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/679721ff-6a89-4f4c-88b8-bb50f315a086-scripts\") pod \"679721ff-6a89-4f4c-88b8-bb50f315a086\" (UID: \"679721ff-6a89-4f4c-88b8-bb50f315a086\") "
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.270713 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-grpw7\" (UniqueName: \"kubernetes.io/projected/50e74892-1d32-4450-8387-853bfd73cad6-kube-api-access-grpw7\") pod \"50e74892-1d32-4450-8387-853bfd73cad6\" (UID: \"50e74892-1d32-4450-8387-853bfd73cad6\") "
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.272594 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/679721ff-6a89-4f4c-88b8-bb50f315a086-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "679721ff-6a89-4f4c-88b8-bb50f315a086" (UID: "679721ff-6a89-4f4c-88b8-bb50f315a086"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.273020 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/679721ff-6a89-4f4c-88b8-bb50f315a086-logs" (OuterVolumeSpecName: "logs") pod "679721ff-6a89-4f4c-88b8-bb50f315a086" (UID: "679721ff-6a89-4f4c-88b8-bb50f315a086"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.272762 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/50e74892-1d32-4450-8387-853bfd73cad6-httpd-run\") pod \"50e74892-1d32-4450-8387-853bfd73cad6\" (UID: \"50e74892-1d32-4450-8387-853bfd73cad6\") "
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.274892 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"50e74892-1d32-4450-8387-853bfd73cad6\" (UID: \"50e74892-1d32-4450-8387-853bfd73cad6\") "
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.274960 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50e74892-1d32-4450-8387-853bfd73cad6-config-data\") pod \"50e74892-1d32-4450-8387-853bfd73cad6\" (UID: \"50e74892-1d32-4450-8387-853bfd73cad6\") "
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.275022 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/50e74892-1d32-4450-8387-853bfd73cad6-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "50e74892-1d32-4450-8387-853bfd73cad6" (UID: "50e74892-1d32-4450-8387-853bfd73cad6"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.276473 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/679721ff-6a89-4f4c-88b8-bb50f315a086-config-data\") pod \"679721ff-6a89-4f4c-88b8-bb50f315a086\" (UID: \"679721ff-6a89-4f4c-88b8-bb50f315a086\") "
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.279369 4728 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/679721ff-6a89-4f4c-88b8-bb50f315a086-logs\") on node \"crc\" DevicePath \"\""
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.279393 4728 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/679721ff-6a89-4f4c-88b8-bb50f315a086-httpd-run\") on node \"crc\" DevicePath \"\""
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.279402 4728 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/50e74892-1d32-4450-8387-853bfd73cad6-httpd-run\") on node \"crc\" DevicePath \"\""
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.279700 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/50e74892-1d32-4450-8387-853bfd73cad6-logs" (OuterVolumeSpecName: "logs") pod "50e74892-1d32-4450-8387-853bfd73cad6" (UID: "50e74892-1d32-4450-8387-853bfd73cad6"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.283264 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/50e74892-1d32-4450-8387-853bfd73cad6-ceph" (OuterVolumeSpecName: "ceph") pod "50e74892-1d32-4450-8387-853bfd73cad6" (UID: "50e74892-1d32-4450-8387-853bfd73cad6"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.284433 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/679721ff-6a89-4f4c-88b8-bb50f315a086-kube-api-access-vk85f" (OuterVolumeSpecName: "kube-api-access-vk85f") pod "679721ff-6a89-4f4c-88b8-bb50f315a086" (UID: "679721ff-6a89-4f4c-88b8-bb50f315a086"). InnerVolumeSpecName "kube-api-access-vk85f". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.284508 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50e74892-1d32-4450-8387-853bfd73cad6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "50e74892-1d32-4450-8387-853bfd73cad6" (UID: "50e74892-1d32-4450-8387-853bfd73cad6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.284559 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/50e74892-1d32-4450-8387-853bfd73cad6-kube-api-access-grpw7" (OuterVolumeSpecName: "kube-api-access-grpw7") pod "50e74892-1d32-4450-8387-853bfd73cad6" (UID: "50e74892-1d32-4450-8387-853bfd73cad6"). InnerVolumeSpecName "kube-api-access-grpw7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.286300 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50e74892-1d32-4450-8387-853bfd73cad6-scripts" (OuterVolumeSpecName: "scripts") pod "50e74892-1d32-4450-8387-853bfd73cad6" (UID: "50e74892-1d32-4450-8387-853bfd73cad6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.286416 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "glance") pod "50e74892-1d32-4450-8387-853bfd73cad6" (UID: "50e74892-1d32-4450-8387-853bfd73cad6"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.287938 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/679721ff-6a89-4f4c-88b8-bb50f315a086-config-data" (OuterVolumeSpecName: "config-data") pod "679721ff-6a89-4f4c-88b8-bb50f315a086" (UID: "679721ff-6a89-4f4c-88b8-bb50f315a086"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.288266 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50e74892-1d32-4450-8387-853bfd73cad6-config-data" (OuterVolumeSpecName: "config-data") pod "50e74892-1d32-4450-8387-853bfd73cad6" (UID: "50e74892-1d32-4450-8387-853bfd73cad6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.295216 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/679721ff-6a89-4f4c-88b8-bb50f315a086-scripts" (OuterVolumeSpecName: "scripts") pod "679721ff-6a89-4f4c-88b8-bb50f315a086" (UID: "679721ff-6a89-4f4c-88b8-bb50f315a086"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.295402 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "glance") pod "679721ff-6a89-4f4c-88b8-bb50f315a086" (UID: "679721ff-6a89-4f4c-88b8-bb50f315a086"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.295879 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/679721ff-6a89-4f4c-88b8-bb50f315a086-ceph" (OuterVolumeSpecName: "ceph") pod "679721ff-6a89-4f4c-88b8-bb50f315a086" (UID: "679721ff-6a89-4f4c-88b8-bb50f315a086"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.299254 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/679721ff-6a89-4f4c-88b8-bb50f315a086-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "679721ff-6a89-4f4c-88b8-bb50f315a086" (UID: "679721ff-6a89-4f4c-88b8-bb50f315a086"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.367406 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a448999-fcb4-41e2-bad8-991c8dad1561" path="/var/lib/kubelet/pods/1a448999-fcb4-41e2-bad8-991c8dad1561/volumes"
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.368244 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d" path="/var/lib/kubelet/pods/2c1ba8a1-58c2-4266-ab59-5c5c1aa7258d/volumes"
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.368991 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b59078b7-77d3-42a5-8ce6-9ef6107377b5" path="/var/lib/kubelet/pods/b59078b7-77d3-42a5-8ce6-9ef6107377b5/volumes"
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.383036 4728 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/679721ff-6a89-4f4c-88b8-bb50f315a086-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.383071 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-grpw7\" (UniqueName: \"kubernetes.io/projected/50e74892-1d32-4450-8387-853bfd73cad6-kube-api-access-grpw7\") on node \"crc\" DevicePath \"\""
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.383201 4728 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" "
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.383213 4728 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50e74892-1d32-4450-8387-853bfd73cad6-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.383222 4728 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/679721ff-6a89-4f4c-88b8-bb50f315a086-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.383231 4728 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/50e74892-1d32-4450-8387-853bfd73cad6-ceph\") on node \"crc\" DevicePath \"\""
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.383239 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vk85f\" (UniqueName: \"kubernetes.io/projected/679721ff-6a89-4f4c-88b8-bb50f315a086-kube-api-access-vk85f\") on node \"crc\" DevicePath \"\""
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.383247 4728 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/50e74892-1d32-4450-8387-853bfd73cad6-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.383255 4728 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/679721ff-6a89-4f4c-88b8-bb50f315a086-ceph\") on node \"crc\" DevicePath \"\""
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.383263 4728 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/679721ff-6a89-4f4c-88b8-bb50f315a086-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.383271 4728 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/50e74892-1d32-4450-8387-853bfd73cad6-logs\") on node \"crc\" DevicePath \"\""
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.383279 4728 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50e74892-1d32-4450-8387-853bfd73cad6-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.383293 4728 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" "
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.412139 4728 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc"
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.415972 4728 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc"
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.486960 4728 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\""
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.487012 4728 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\""
Dec 05 11:29:20 crc kubenswrapper[4728]: I1205 11:29:20.634451 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6d7b85cb69-vxpnj"]
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.151086 4728 generic.go:334] "Generic (PLEG): container finished" podID="5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1" containerID="681f11ee3ecda221214b21a459c08c722f866b98a91ed4b958121041d9cad81b" exitCode=0
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.151193 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-snxbb" event={"ID":"5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1","Type":"ContainerDied","Data":"681f11ee3ecda221214b21a459c08c722f866b98a91ed4b958121041d9cad81b"}
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.166730 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6d7b85cb69-vxpnj" event={"ID":"eeb8f353-d0fc-4195-82c3-bff916a2ca01","Type":"ContainerStarted","Data":"9d653e1f6b8ebd81739550d6006a04a63c7a6f9c8603dd456525002b2b1be14b"}
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.166753 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.166785 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.332221 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.353096 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.367961 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 05 11:29:21 crc kubenswrapper[4728]: E1205 11:29:21.368843 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b59078b7-77d3-42a5-8ce6-9ef6107377b5" containerName="init"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.368864 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="b59078b7-77d3-42a5-8ce6-9ef6107377b5" containerName="init"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.369495 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="b59078b7-77d3-42a5-8ce6-9ef6107377b5" containerName="init"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.371106 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.381022 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.381059 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.381417 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.381580 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-4w4f7"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.402457 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.445429 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.468025 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.473273 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.480859 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.481992 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.486651 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.506595 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104-scripts\") pod \"glance-default-internal-api-0\" (UID: \"643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.506862 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6lxxt\" (UniqueName: \"kubernetes.io/projected/643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104-kube-api-access-6lxxt\") pod \"glance-default-internal-api-0\" (UID: \"643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.506909 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104-config-data\") pod \"glance-default-internal-api-0\" (UID: \"643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.506945 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.506968 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.506994 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.507018 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104-ceph\") pod \"glance-default-internal-api-0\" (UID: \"643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.507047 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104-logs\") pod \"glance-default-internal-api-0\" (UID: \"643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.609156 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0f9de743-47da-494a-aad4-fa892a1c0677-logs\") pod \"glance-default-external-api-0\" (UID: \"0f9de743-47da-494a-aad4-fa892a1c0677\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.609227 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f9de743-47da-494a-aad4-fa892a1c0677-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"0f9de743-47da-494a-aad4-fa892a1c0677\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.609254 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6lxxt\" (UniqueName: \"kubernetes.io/projected/643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104-kube-api-access-6lxxt\") pod \"glance-default-internal-api-0\" (UID: \"643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.609333 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104-config-data\") pod \"glance-default-internal-api-0\" (UID: \"643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.609456 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0f9de743-47da-494a-aad4-fa892a1c0677-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"0f9de743-47da-494a-aad4-fa892a1c0677\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.609521 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.609555 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"0f9de743-47da-494a-aad4-fa892a1c0677\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.609579 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f9de743-47da-494a-aad4-fa892a1c0677-config-data\") pod \"glance-default-external-api-0\" (UID: \"0f9de743-47da-494a-aad4-fa892a1c0677\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.609648 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.609693 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wgfkx\" (UniqueName: \"kubernetes.io/projected/0f9de743-47da-494a-aad4-fa892a1c0677-kube-api-access-wgfkx\") pod \"glance-default-external-api-0\" (UID: \"0f9de743-47da-494a-aad4-fa892a1c0677\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.609766 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.609840 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104-ceph\") pod \"glance-default-internal-api-0\" (UID: \"643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.610018 4728 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/glance-default-internal-api-0"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.610049 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/0f9de743-47da-494a-aad4-fa892a1c0677-ceph\") pod \"glance-default-external-api-0\" (UID: \"0f9de743-47da-494a-aad4-fa892a1c0677\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.610221 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104-logs\") pod \"glance-default-internal-api-0\" (UID: \"643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.610445 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0f9de743-47da-494a-aad4-fa892a1c0677-scripts\") pod \"glance-default-external-api-0\" (UID: \"0f9de743-47da-494a-aad4-fa892a1c0677\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.610473 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104-scripts\") pod \"glance-default-internal-api-0\" (UID: \"643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.610566 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.610618 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104-logs\") pod \"glance-default-internal-api-0\" (UID: \"643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.617704 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104-ceph\") pod \"glance-default-internal-api-0\" (UID: \"643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.618047 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104-config-data\") pod \"glance-default-internal-api-0\" (UID: \"643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.618588 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.620490 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104-scripts\") pod \"glance-default-internal-api-0\" (UID: \"643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.639913 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6lxxt\" (UniqueName: \"kubernetes.io/projected/643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104-kube-api-access-6lxxt\") pod \"glance-default-internal-api-0\" (UID: \"643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.643145 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.712741 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0f9de743-47da-494a-aad4-fa892a1c0677-scripts\") pod \"glance-default-external-api-0\" (UID: \"0f9de743-47da-494a-aad4-fa892a1c0677\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.712868 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0f9de743-47da-494a-aad4-fa892a1c0677-logs\") pod \"glance-default-external-api-0\" (UID: \"0f9de743-47da-494a-aad4-fa892a1c0677\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.712930 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f9de743-47da-494a-aad4-fa892a1c0677-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"0f9de743-47da-494a-aad4-fa892a1c0677\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.712978 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0f9de743-47da-494a-aad4-fa892a1c0677-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"0f9de743-47da-494a-aad4-fa892a1c0677\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.713014 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"0f9de743-47da-494a-aad4-fa892a1c0677\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.713037 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f9de743-47da-494a-aad4-fa892a1c0677-config-data\") pod \"glance-default-external-api-0\" (UID: \"0f9de743-47da-494a-aad4-fa892a1c0677\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.713069 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wgfkx\" (UniqueName: \"kubernetes.io/projected/0f9de743-47da-494a-aad4-fa892a1c0677-kube-api-access-wgfkx\") pod \"glance-default-external-api-0\" (UID: \"0f9de743-47da-494a-aad4-fa892a1c0677\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.713121 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/0f9de743-47da-494a-aad4-fa892a1c0677-ceph\") pod \"glance-default-external-api-0\" (UID: \"0f9de743-47da-494a-aad4-fa892a1c0677\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.714122 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0f9de743-47da-494a-aad4-fa892a1c0677-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"0f9de743-47da-494a-aad4-fa892a1c0677\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.716726 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/0f9de743-47da-494a-aad4-fa892a1c0677-ceph\") pod \"glance-default-external-api-0\" (UID: \"0f9de743-47da-494a-aad4-fa892a1c0677\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.716743 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0f9de743-47da-494a-aad4-fa892a1c0677-logs\") pod \"glance-default-external-api-0\" (UID: \"0f9de743-47da-494a-aad4-fa892a1c0677\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.717190 4728 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"0f9de743-47da-494a-aad4-fa892a1c0677\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-external-api-0"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.722656 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f9de743-47da-494a-aad4-fa892a1c0677-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"0f9de743-47da-494a-aad4-fa892a1c0677\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.723238 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0f9de743-47da-494a-aad4-fa892a1c0677-scripts\") pod \"glance-default-external-api-0\" (UID: \"0f9de743-47da-494a-aad4-fa892a1c0677\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.725097 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f9de743-47da-494a-aad4-fa892a1c0677-config-data\") pod \"glance-default-external-api-0\" (UID: \"0f9de743-47da-494a-aad4-fa892a1c0677\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.743421 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wgfkx\" (UniqueName: \"kubernetes.io/projected/0f9de743-47da-494a-aad4-fa892a1c0677-kube-api-access-wgfkx\") pod \"glance-default-external-api-0\" (UID: \"0f9de743-47da-494a-aad4-fa892a1c0677\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.800071 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Dec 05 11:29:21 crc kubenswrapper[4728]: I1205 11:29:21.827290 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"0f9de743-47da-494a-aad4-fa892a1c0677\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:29:22 crc kubenswrapper[4728]: I1205 11:29:22.110459 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Dec 05 11:29:22 crc kubenswrapper[4728]: I1205 11:29:22.193935 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-snxbb" event={"ID":"5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1","Type":"ContainerStarted","Data":"f9d3a8d0d5662b90a3cbb65dc1936090e52994eecc9ea5045d9019598ed88cdd"}
Dec 05 11:29:22 crc kubenswrapper[4728]: I1205 11:29:22.194184 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-785d8bcb8c-snxbb"
Dec 05 11:29:22 crc kubenswrapper[4728]: I1205 11:29:22.219897 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-785d8bcb8c-snxbb" podStartSLOduration=4.219881459 podStartE2EDuration="4.219881459s" podCreationTimestamp="2025-12-05 11:29:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:29:22.217931807 +0000 UTC m=+1296.360054520" watchObservedRunningTime="2025-12-05 11:29:22.219881459 +0000 UTC m=+1296.362004152"
Dec 05 11:29:22 crc kubenswrapper[4728]: I1205 11:29:22.369766 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="50e74892-1d32-4450-8387-853bfd73cad6" path="/var/lib/kubelet/pods/50e74892-1d32-4450-8387-853bfd73cad6/volumes"
Dec 05 11:29:22 crc kubenswrapper[4728]: I1205 11:29:22.370315 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="679721ff-6a89-4f4c-88b8-bb50f315a086" path="/var/lib/kubelet/pods/679721ff-6a89-4f4c-88b8-bb50f315a086/volumes"
Dec 05 11:29:22 crc kubenswrapper[4728]: I1205 11:29:22.900772 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 05 11:29:23 crc kubenswrapper[4728]: I1205 11:29:23.014568 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 05 11:29:23 crc kubenswrapper[4728]: W1205 11:29:23.028443 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0f9de743_47da_494a_aad4_fa892a1c0677.slice/crio-f586635dd81867f490e12b90b97d436a531a5aa8b54e91bbfb3080e317b537a1 WatchSource:0}: Error finding container f586635dd81867f490e12b90b97d436a531a5aa8b54e91bbfb3080e317b537a1: Status 404 returned error can't find the container with id f586635dd81867f490e12b90b97d436a531a5aa8b54e91bbfb3080e317b537a1
Dec 05 11:29:23 crc kubenswrapper[4728]: I1205 11:29:23.209604 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104","Type":"ContainerStarted","Data":"9434a542e5c22bf5d580e423744419145620f153a2c089c6c17554aef5a15f53"}
Dec 05 11:29:23 crc kubenswrapper[4728]: I1205 11:29:23.213438 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"0f9de743-47da-494a-aad4-fa892a1c0677","Type":"ContainerStarted","Data":"f586635dd81867f490e12b90b97d436a531a5aa8b54e91bbfb3080e317b537a1"}
Dec 05 11:29:24 crc kubenswrapper[4728]: I1205 11:29:24.247418 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104","Type":"ContainerStarted","Data":"62c155f1bca6b2511409e49f93e8dd3bbaa617dd604db2245aa174370aeb174e"}
Dec 05 11:29:24 crc kubenswrapper[4728]: I1205 11:29:24.253479 4728 generic.go:334] "Generic (PLEG): container finished" podID="3b80572c-d607-43b8-85da-02ae5a6ae057" containerID="8d00f641b64e0dc77217bfa3f3b6e0e64613764b19e41a5bb7bf5bc57fefb456" exitCode=0
Dec 05 11:29:24 crc kubenswrapper[4728]: I1205 11:29:24.253507 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-dlvvg" event={"ID":"3b80572c-d607-43b8-85da-02ae5a6ae057","Type":"ContainerDied","Data":"8d00f641b64e0dc77217bfa3f3b6e0e64613764b19e41a5bb7bf5bc57fefb456"}
Dec 05 11:29:26 crc kubenswrapper[4728]: I1205 11:29:26.274901 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"0f9de743-47da-494a-aad4-fa892a1c0677","Type":"ContainerStarted","Data":"31af6c97d793a61d5038ad8951802770a382d7510d3ef7aa9dfd1e85270a94c3"}
Dec 05 11:29:28 crc kubenswrapper[4728]: I1205 11:29:28.711160 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-785d8bcb8c-snxbb"
Dec 05 11:29:28 crc kubenswrapper[4728]: I1205 11:29:28.775175 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-8qq5x"]
Dec 05 11:29:28 crc kubenswrapper[4728]: I1205 11:29:28.775465 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-698758b865-8qq5x" podUID="41e42f3d-f948-4eb4-99cb-842f4e17c69c" containerName="dnsmasq-dns" containerID="cri-o://20298d372b55710c9d6c93f1d2dc85ecfe33aa32e270c2a1172aa7a7c9d13321" gracePeriod=10
Dec 05 11:29:29 crc kubenswrapper[4728]: I1205 11:29:29.201435 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 05 11:29:29 crc kubenswrapper[4728]: I1205 11:29:29.302659 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 05 11:29:29 crc kubenswrapper[4728]: I1205 11:29:29.330418 4728 generic.go:334] "Generic (PLEG): container finished" podID="41e42f3d-f948-4eb4-99cb-842f4e17c69c" containerID="20298d372b55710c9d6c93f1d2dc85ecfe33aa32e270c2a1172aa7a7c9d13321" exitCode=0
Dec 05 11:29:29 crc kubenswrapper[4728]: I1205 11:29:29.330466 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-8qq5x" event={"ID":"41e42f3d-f948-4eb4-99cb-842f4e17c69c","Type":"ContainerDied","Data":"20298d372b55710c9d6c93f1d2dc85ecfe33aa32e270c2a1172aa7a7c9d13321"}
Dec 05 11:29:29 crc kubenswrapper[4728]: I1205 11:29:29.742686 4728 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-8qq5x" podUID="41e42f3d-f948-4eb4-99cb-842f4e17c69c" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.119:5353: connect: connection refused"
Dec 05 11:29:30 crc kubenswrapper[4728]: I1205 11:29:30.502551 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-847bd6b965-vxxz2"]
Dec 05 11:29:30 crc kubenswrapper[4728]: I1205 11:29:30.561218 4728 kubelet.go:2421] "SyncLoop ADD" source="api"
pods=["openstack/horizon-5d7bdb6c68-cfbgd"] Dec 05 11:29:30 crc kubenswrapper[4728]: I1205 11:29:30.563068 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5d7bdb6c68-cfbgd" Dec 05 11:29:30 crc kubenswrapper[4728]: I1205 11:29:30.576073 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-horizon-svc" Dec 05 11:29:30 crc kubenswrapper[4728]: I1205 11:29:30.599735 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5d7bdb6c68-cfbgd"] Dec 05 11:29:30 crc kubenswrapper[4728]: I1205 11:29:30.625467 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/7c54bdd7-0427-403e-a33d-2d52ec56a7fc-horizon-tls-certs\") pod \"horizon-5d7bdb6c68-cfbgd\" (UID: \"7c54bdd7-0427-403e-a33d-2d52ec56a7fc\") " pod="openstack/horizon-5d7bdb6c68-cfbgd" Dec 05 11:29:30 crc kubenswrapper[4728]: I1205 11:29:30.625528 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/7c54bdd7-0427-403e-a33d-2d52ec56a7fc-horizon-secret-key\") pod \"horizon-5d7bdb6c68-cfbgd\" (UID: \"7c54bdd7-0427-403e-a33d-2d52ec56a7fc\") " pod="openstack/horizon-5d7bdb6c68-cfbgd" Dec 05 11:29:30 crc kubenswrapper[4728]: I1205 11:29:30.625577 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7c54bdd7-0427-403e-a33d-2d52ec56a7fc-logs\") pod \"horizon-5d7bdb6c68-cfbgd\" (UID: \"7c54bdd7-0427-403e-a33d-2d52ec56a7fc\") " pod="openstack/horizon-5d7bdb6c68-cfbgd" Dec 05 11:29:30 crc kubenswrapper[4728]: I1205 11:29:30.625627 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ctnkh\" (UniqueName: \"kubernetes.io/projected/7c54bdd7-0427-403e-a33d-2d52ec56a7fc-kube-api-access-ctnkh\") pod \"horizon-5d7bdb6c68-cfbgd\" (UID: \"7c54bdd7-0427-403e-a33d-2d52ec56a7fc\") " pod="openstack/horizon-5d7bdb6c68-cfbgd" Dec 05 11:29:30 crc kubenswrapper[4728]: I1205 11:29:30.625668 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7c54bdd7-0427-403e-a33d-2d52ec56a7fc-config-data\") pod \"horizon-5d7bdb6c68-cfbgd\" (UID: \"7c54bdd7-0427-403e-a33d-2d52ec56a7fc\") " pod="openstack/horizon-5d7bdb6c68-cfbgd" Dec 05 11:29:30 crc kubenswrapper[4728]: I1205 11:29:30.625721 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c54bdd7-0427-403e-a33d-2d52ec56a7fc-combined-ca-bundle\") pod \"horizon-5d7bdb6c68-cfbgd\" (UID: \"7c54bdd7-0427-403e-a33d-2d52ec56a7fc\") " pod="openstack/horizon-5d7bdb6c68-cfbgd" Dec 05 11:29:30 crc kubenswrapper[4728]: I1205 11:29:30.625929 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7c54bdd7-0427-403e-a33d-2d52ec56a7fc-scripts\") pod \"horizon-5d7bdb6c68-cfbgd\" (UID: \"7c54bdd7-0427-403e-a33d-2d52ec56a7fc\") " pod="openstack/horizon-5d7bdb6c68-cfbgd" Dec 05 11:29:30 crc kubenswrapper[4728]: I1205 11:29:30.727040 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6d7b85cb69-vxpnj"] Dec 05 11:29:30 crc kubenswrapper[4728]: I1205 11:29:30.727839 
4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7c54bdd7-0427-403e-a33d-2d52ec56a7fc-scripts\") pod \"horizon-5d7bdb6c68-cfbgd\" (UID: \"7c54bdd7-0427-403e-a33d-2d52ec56a7fc\") " pod="openstack/horizon-5d7bdb6c68-cfbgd" Dec 05 11:29:30 crc kubenswrapper[4728]: I1205 11:29:30.727922 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/7c54bdd7-0427-403e-a33d-2d52ec56a7fc-horizon-tls-certs\") pod \"horizon-5d7bdb6c68-cfbgd\" (UID: \"7c54bdd7-0427-403e-a33d-2d52ec56a7fc\") " pod="openstack/horizon-5d7bdb6c68-cfbgd" Dec 05 11:29:30 crc kubenswrapper[4728]: I1205 11:29:30.727946 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/7c54bdd7-0427-403e-a33d-2d52ec56a7fc-horizon-secret-key\") pod \"horizon-5d7bdb6c68-cfbgd\" (UID: \"7c54bdd7-0427-403e-a33d-2d52ec56a7fc\") " pod="openstack/horizon-5d7bdb6c68-cfbgd" Dec 05 11:29:30 crc kubenswrapper[4728]: I1205 11:29:30.727970 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7c54bdd7-0427-403e-a33d-2d52ec56a7fc-logs\") pod \"horizon-5d7bdb6c68-cfbgd\" (UID: \"7c54bdd7-0427-403e-a33d-2d52ec56a7fc\") " pod="openstack/horizon-5d7bdb6c68-cfbgd" Dec 05 11:29:30 crc kubenswrapper[4728]: I1205 11:29:30.727992 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ctnkh\" (UniqueName: \"kubernetes.io/projected/7c54bdd7-0427-403e-a33d-2d52ec56a7fc-kube-api-access-ctnkh\") pod \"horizon-5d7bdb6c68-cfbgd\" (UID: \"7c54bdd7-0427-403e-a33d-2d52ec56a7fc\") " pod="openstack/horizon-5d7bdb6c68-cfbgd" Dec 05 11:29:30 crc kubenswrapper[4728]: I1205 11:29:30.728012 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7c54bdd7-0427-403e-a33d-2d52ec56a7fc-config-data\") pod \"horizon-5d7bdb6c68-cfbgd\" (UID: \"7c54bdd7-0427-403e-a33d-2d52ec56a7fc\") " pod="openstack/horizon-5d7bdb6c68-cfbgd" Dec 05 11:29:30 crc kubenswrapper[4728]: I1205 11:29:30.728036 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c54bdd7-0427-403e-a33d-2d52ec56a7fc-combined-ca-bundle\") pod \"horizon-5d7bdb6c68-cfbgd\" (UID: \"7c54bdd7-0427-403e-a33d-2d52ec56a7fc\") " pod="openstack/horizon-5d7bdb6c68-cfbgd" Dec 05 11:29:30 crc kubenswrapper[4728]: I1205 11:29:30.729404 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7c54bdd7-0427-403e-a33d-2d52ec56a7fc-logs\") pod \"horizon-5d7bdb6c68-cfbgd\" (UID: \"7c54bdd7-0427-403e-a33d-2d52ec56a7fc\") " pod="openstack/horizon-5d7bdb6c68-cfbgd" Dec 05 11:29:30 crc kubenswrapper[4728]: I1205 11:29:30.730058 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7c54bdd7-0427-403e-a33d-2d52ec56a7fc-scripts\") pod \"horizon-5d7bdb6c68-cfbgd\" (UID: \"7c54bdd7-0427-403e-a33d-2d52ec56a7fc\") " pod="openstack/horizon-5d7bdb6c68-cfbgd" Dec 05 11:29:30 crc kubenswrapper[4728]: I1205 11:29:30.730921 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7c54bdd7-0427-403e-a33d-2d52ec56a7fc-config-data\") pod 
\"horizon-5d7bdb6c68-cfbgd\" (UID: \"7c54bdd7-0427-403e-a33d-2d52ec56a7fc\") " pod="openstack/horizon-5d7bdb6c68-cfbgd" Dec 05 11:29:30 crc kubenswrapper[4728]: I1205 11:29:30.735532 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/7c54bdd7-0427-403e-a33d-2d52ec56a7fc-horizon-tls-certs\") pod \"horizon-5d7bdb6c68-cfbgd\" (UID: \"7c54bdd7-0427-403e-a33d-2d52ec56a7fc\") " pod="openstack/horizon-5d7bdb6c68-cfbgd" Dec 05 11:29:30 crc kubenswrapper[4728]: I1205 11:29:30.736504 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/7c54bdd7-0427-403e-a33d-2d52ec56a7fc-horizon-secret-key\") pod \"horizon-5d7bdb6c68-cfbgd\" (UID: \"7c54bdd7-0427-403e-a33d-2d52ec56a7fc\") " pod="openstack/horizon-5d7bdb6c68-cfbgd" Dec 05 11:29:30 crc kubenswrapper[4728]: I1205 11:29:30.757829 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ctnkh\" (UniqueName: \"kubernetes.io/projected/7c54bdd7-0427-403e-a33d-2d52ec56a7fc-kube-api-access-ctnkh\") pod \"horizon-5d7bdb6c68-cfbgd\" (UID: \"7c54bdd7-0427-403e-a33d-2d52ec56a7fc\") " pod="openstack/horizon-5d7bdb6c68-cfbgd" Dec 05 11:29:30 crc kubenswrapper[4728]: I1205 11:29:30.759733 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c54bdd7-0427-403e-a33d-2d52ec56a7fc-combined-ca-bundle\") pod \"horizon-5d7bdb6c68-cfbgd\" (UID: \"7c54bdd7-0427-403e-a33d-2d52ec56a7fc\") " pod="openstack/horizon-5d7bdb6c68-cfbgd" Dec 05 11:29:30 crc kubenswrapper[4728]: I1205 11:29:30.845907 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-7755888bd8-shzsv"] Dec 05 11:29:30 crc kubenswrapper[4728]: I1205 11:29:30.847566 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7755888bd8-shzsv" Dec 05 11:29:30 crc kubenswrapper[4728]: I1205 11:29:30.859608 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7755888bd8-shzsv"] Dec 05 11:29:30 crc kubenswrapper[4728]: I1205 11:29:30.892259 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-5d7bdb6c68-cfbgd" Dec 05 11:29:30 crc kubenswrapper[4728]: I1205 11:29:30.935515 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/841ca27f-0486-413e-975b-4f51b008883a-combined-ca-bundle\") pod \"horizon-7755888bd8-shzsv\" (UID: \"841ca27f-0486-413e-975b-4f51b008883a\") " pod="openstack/horizon-7755888bd8-shzsv" Dec 05 11:29:30 crc kubenswrapper[4728]: I1205 11:29:30.935587 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/841ca27f-0486-413e-975b-4f51b008883a-horizon-secret-key\") pod \"horizon-7755888bd8-shzsv\" (UID: \"841ca27f-0486-413e-975b-4f51b008883a\") " pod="openstack/horizon-7755888bd8-shzsv" Dec 05 11:29:30 crc kubenswrapper[4728]: I1205 11:29:30.935964 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/841ca27f-0486-413e-975b-4f51b008883a-config-data\") pod \"horizon-7755888bd8-shzsv\" (UID: \"841ca27f-0486-413e-975b-4f51b008883a\") " pod="openstack/horizon-7755888bd8-shzsv" Dec 05 11:29:30 crc kubenswrapper[4728]: I1205 11:29:30.935998 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/841ca27f-0486-413e-975b-4f51b008883a-horizon-tls-certs\") pod \"horizon-7755888bd8-shzsv\" (UID: \"841ca27f-0486-413e-975b-4f51b008883a\") " pod="openstack/horizon-7755888bd8-shzsv" Dec 05 11:29:30 crc kubenswrapper[4728]: I1205 11:29:30.936042 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/841ca27f-0486-413e-975b-4f51b008883a-logs\") pod \"horizon-7755888bd8-shzsv\" (UID: \"841ca27f-0486-413e-975b-4f51b008883a\") " pod="openstack/horizon-7755888bd8-shzsv" Dec 05 11:29:30 crc kubenswrapper[4728]: I1205 11:29:30.936165 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pjpzv\" (UniqueName: \"kubernetes.io/projected/841ca27f-0486-413e-975b-4f51b008883a-kube-api-access-pjpzv\") pod \"horizon-7755888bd8-shzsv\" (UID: \"841ca27f-0486-413e-975b-4f51b008883a\") " pod="openstack/horizon-7755888bd8-shzsv" Dec 05 11:29:30 crc kubenswrapper[4728]: I1205 11:29:30.936188 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/841ca27f-0486-413e-975b-4f51b008883a-scripts\") pod \"horizon-7755888bd8-shzsv\" (UID: \"841ca27f-0486-413e-975b-4f51b008883a\") " pod="openstack/horizon-7755888bd8-shzsv" Dec 05 11:29:31 crc kubenswrapper[4728]: I1205 11:29:31.038110 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/841ca27f-0486-413e-975b-4f51b008883a-horizon-secret-key\") pod \"horizon-7755888bd8-shzsv\" (UID: \"841ca27f-0486-413e-975b-4f51b008883a\") " pod="openstack/horizon-7755888bd8-shzsv" Dec 05 11:29:31 crc kubenswrapper[4728]: I1205 11:29:31.038569 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/841ca27f-0486-413e-975b-4f51b008883a-config-data\") pod \"horizon-7755888bd8-shzsv\" (UID: 
\"841ca27f-0486-413e-975b-4f51b008883a\") " pod="openstack/horizon-7755888bd8-shzsv" Dec 05 11:29:31 crc kubenswrapper[4728]: I1205 11:29:31.038602 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/841ca27f-0486-413e-975b-4f51b008883a-horizon-tls-certs\") pod \"horizon-7755888bd8-shzsv\" (UID: \"841ca27f-0486-413e-975b-4f51b008883a\") " pod="openstack/horizon-7755888bd8-shzsv" Dec 05 11:29:31 crc kubenswrapper[4728]: I1205 11:29:31.038637 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/841ca27f-0486-413e-975b-4f51b008883a-logs\") pod \"horizon-7755888bd8-shzsv\" (UID: \"841ca27f-0486-413e-975b-4f51b008883a\") " pod="openstack/horizon-7755888bd8-shzsv" Dec 05 11:29:31 crc kubenswrapper[4728]: I1205 11:29:31.038675 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pjpzv\" (UniqueName: \"kubernetes.io/projected/841ca27f-0486-413e-975b-4f51b008883a-kube-api-access-pjpzv\") pod \"horizon-7755888bd8-shzsv\" (UID: \"841ca27f-0486-413e-975b-4f51b008883a\") " pod="openstack/horizon-7755888bd8-shzsv" Dec 05 11:29:31 crc kubenswrapper[4728]: I1205 11:29:31.038945 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/841ca27f-0486-413e-975b-4f51b008883a-scripts\") pod \"horizon-7755888bd8-shzsv\" (UID: \"841ca27f-0486-413e-975b-4f51b008883a\") " pod="openstack/horizon-7755888bd8-shzsv" Dec 05 11:29:31 crc kubenswrapper[4728]: I1205 11:29:31.039108 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/841ca27f-0486-413e-975b-4f51b008883a-logs\") pod \"horizon-7755888bd8-shzsv\" (UID: \"841ca27f-0486-413e-975b-4f51b008883a\") " pod="openstack/horizon-7755888bd8-shzsv" Dec 05 11:29:31 crc kubenswrapper[4728]: I1205 11:29:31.039208 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/841ca27f-0486-413e-975b-4f51b008883a-combined-ca-bundle\") pod \"horizon-7755888bd8-shzsv\" (UID: \"841ca27f-0486-413e-975b-4f51b008883a\") " pod="openstack/horizon-7755888bd8-shzsv" Dec 05 11:29:31 crc kubenswrapper[4728]: I1205 11:29:31.039714 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/841ca27f-0486-413e-975b-4f51b008883a-scripts\") pod \"horizon-7755888bd8-shzsv\" (UID: \"841ca27f-0486-413e-975b-4f51b008883a\") " pod="openstack/horizon-7755888bd8-shzsv" Dec 05 11:29:31 crc kubenswrapper[4728]: I1205 11:29:31.039934 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/841ca27f-0486-413e-975b-4f51b008883a-config-data\") pod \"horizon-7755888bd8-shzsv\" (UID: \"841ca27f-0486-413e-975b-4f51b008883a\") " pod="openstack/horizon-7755888bd8-shzsv" Dec 05 11:29:31 crc kubenswrapper[4728]: I1205 11:29:31.041731 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/841ca27f-0486-413e-975b-4f51b008883a-horizon-tls-certs\") pod \"horizon-7755888bd8-shzsv\" (UID: \"841ca27f-0486-413e-975b-4f51b008883a\") " pod="openstack/horizon-7755888bd8-shzsv" Dec 05 11:29:31 crc kubenswrapper[4728]: I1205 11:29:31.053744 4728 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/841ca27f-0486-413e-975b-4f51b008883a-horizon-secret-key\") pod \"horizon-7755888bd8-shzsv\" (UID: \"841ca27f-0486-413e-975b-4f51b008883a\") " pod="openstack/horizon-7755888bd8-shzsv" Dec 05 11:29:31 crc kubenswrapper[4728]: I1205 11:29:31.053879 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/841ca27f-0486-413e-975b-4f51b008883a-combined-ca-bundle\") pod \"horizon-7755888bd8-shzsv\" (UID: \"841ca27f-0486-413e-975b-4f51b008883a\") " pod="openstack/horizon-7755888bd8-shzsv" Dec 05 11:29:31 crc kubenswrapper[4728]: I1205 11:29:31.057450 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pjpzv\" (UniqueName: \"kubernetes.io/projected/841ca27f-0486-413e-975b-4f51b008883a-kube-api-access-pjpzv\") pod \"horizon-7755888bd8-shzsv\" (UID: \"841ca27f-0486-413e-975b-4f51b008883a\") " pod="openstack/horizon-7755888bd8-shzsv" Dec 05 11:29:31 crc kubenswrapper[4728]: I1205 11:29:31.198823 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7755888bd8-shzsv" Dec 05 11:29:34 crc kubenswrapper[4728]: I1205 11:29:34.742464 4728 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-8qq5x" podUID="41e42f3d-f948-4eb4-99cb-842f4e17c69c" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.119:5353: connect: connection refused" Dec 05 11:29:39 crc kubenswrapper[4728]: I1205 11:29:39.742709 4728 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-8qq5x" podUID="41e42f3d-f948-4eb4-99cb-842f4e17c69c" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.119:5353: connect: connection refused" Dec 05 11:29:39 crc kubenswrapper[4728]: I1205 11:29:39.743438 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-698758b865-8qq5x" Dec 05 11:29:44 crc kubenswrapper[4728]: I1205 11:29:44.742968 4728 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-8qq5x" podUID="41e42f3d-f948-4eb4-99cb-842f4e17c69c" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.119:5353: connect: connection refused" Dec 05 11:29:45 crc kubenswrapper[4728]: E1205 11:29:45.354607 4728 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9e792568_52ab_4080_bd08_8d6ef2f15ee7.slice/crio-conmon-266530df784a322eb87fc13e1fd3e4001c9e02cccbe288ba7aaf883c9968a82c.scope\": RecentStats: unable to find data in memory cache]" Dec 05 11:29:45 crc kubenswrapper[4728]: I1205 11:29:45.506532 4728 generic.go:334] "Generic (PLEG): container finished" podID="9e792568-52ab-4080-bd08-8d6ef2f15ee7" containerID="266530df784a322eb87fc13e1fd3e4001c9e02cccbe288ba7aaf883c9968a82c" exitCode=0 Dec 05 11:29:45 crc kubenswrapper[4728]: I1205 11:29:45.506597 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-j552g" event={"ID":"9e792568-52ab-4080-bd08-8d6ef2f15ee7","Type":"ContainerDied","Data":"266530df784a322eb87fc13e1fd3e4001c9e02cccbe288ba7aaf883c9968a82c"} Dec 05 11:29:46 crc kubenswrapper[4728]: E1205 11:29:46.019662 4728 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" 
image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Dec 05 11:29:46 crc kubenswrapper[4728]: E1205 11:29:46.020246 4728 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F /var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n669hf5h5d7h64bh646h646h6bh668h74hb8h559h56bh66fh699h54fh648h697h556h588hc4h667h555h54bh5c8h77h598h57fh695hd5h94h5c7h668q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-qtkjw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-766d597b85-nkvzq_openstack(bae2c0c5-184c-4c07-890e-2ba44bf39533): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 11:29:46 crc kubenswrapper[4728]: E1205 11:29:46.023700 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-766d597b85-nkvzq" podUID="bae2c0c5-184c-4c07-890e-2ba44bf39533" Dec 05 11:29:46 crc kubenswrapper[4728]: E1205 11:29:46.029142 4728 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Dec 05 11:29:46 crc kubenswrapper[4728]: E1205 11:29:46.029333 4728 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F 
/var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nc6hddh6h5bbh5d8hbh655h574h56fhch566h56ch56hf6h89h76hb8h54h56chb8h7h57ch54ch558h57ch54fh7dh95h5bfh59ch68bhdbq,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rr8mw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-847bd6b965-vxxz2_openstack(55ead1aa-62ac-4873-9d68-d409a2823f8c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 11:29:46 crc kubenswrapper[4728]: E1205 11:29:46.031858 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-847bd6b965-vxxz2" podUID="55ead1aa-62ac-4873-9d68-d409a2823f8c" Dec 05 11:29:48 crc kubenswrapper[4728]: E1205 11:29:48.256962 4728 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Dec 05 11:29:48 crc kubenswrapper[4728]: E1205 11:29:48.257633 4728 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F 
/var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n7bh9h69hd9h696h598h5b5h5ddh5f5h55ch677h5b9h9fh7ch6fh5fh664hc5h9ch94h57ch57dhbdh5d4h597h5c8h98h55ch6fh667h584h556q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-8f8jw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-6d7b85cb69-vxpnj_openstack(eeb8f353-d0fc-4195-82c3-bff916a2ca01): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 11:29:48 crc kubenswrapper[4728]: E1205 11:29:48.261118 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-6d7b85cb69-vxpnj" podUID="eeb8f353-d0fc-4195-82c3-bff916a2ca01" Dec 05 11:29:49 crc kubenswrapper[4728]: I1205 11:29:49.742207 4728 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-8qq5x" podUID="41e42f3d-f948-4eb4-99cb-842f4e17c69c" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.119:5353: connect: connection refused" Dec 05 11:29:51 crc kubenswrapper[4728]: I1205 11:29:51.375861 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-dlvvg" Dec 05 11:29:51 crc kubenswrapper[4728]: I1205 11:29:51.464874 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3b80572c-d607-43b8-85da-02ae5a6ae057-credential-keys\") pod \"3b80572c-d607-43b8-85da-02ae5a6ae057\" (UID: \"3b80572c-d607-43b8-85da-02ae5a6ae057\") " Dec 05 11:29:51 crc kubenswrapper[4728]: I1205 11:29:51.464989 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3b80572c-d607-43b8-85da-02ae5a6ae057-scripts\") pod \"3b80572c-d607-43b8-85da-02ae5a6ae057\" (UID: \"3b80572c-d607-43b8-85da-02ae5a6ae057\") " Dec 05 11:29:51 crc kubenswrapper[4728]: I1205 11:29:51.465032 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-stb9m\" (UniqueName: \"kubernetes.io/projected/3b80572c-d607-43b8-85da-02ae5a6ae057-kube-api-access-stb9m\") pod \"3b80572c-d607-43b8-85da-02ae5a6ae057\" (UID: \"3b80572c-d607-43b8-85da-02ae5a6ae057\") " Dec 05 11:29:51 crc kubenswrapper[4728]: I1205 11:29:51.465060 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b80572c-d607-43b8-85da-02ae5a6ae057-combined-ca-bundle\") pod \"3b80572c-d607-43b8-85da-02ae5a6ae057\" (UID: \"3b80572c-d607-43b8-85da-02ae5a6ae057\") " Dec 05 11:29:51 crc kubenswrapper[4728]: I1205 11:29:51.465095 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b80572c-d607-43b8-85da-02ae5a6ae057-config-data\") pod \"3b80572c-d607-43b8-85da-02ae5a6ae057\" (UID: \"3b80572c-d607-43b8-85da-02ae5a6ae057\") " Dec 05 11:29:51 crc kubenswrapper[4728]: I1205 11:29:51.465193 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3b80572c-d607-43b8-85da-02ae5a6ae057-fernet-keys\") pod \"3b80572c-d607-43b8-85da-02ae5a6ae057\" (UID: \"3b80572c-d607-43b8-85da-02ae5a6ae057\") " Dec 05 11:29:51 crc kubenswrapper[4728]: I1205 11:29:51.470187 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b80572c-d607-43b8-85da-02ae5a6ae057-kube-api-access-stb9m" (OuterVolumeSpecName: "kube-api-access-stb9m") pod "3b80572c-d607-43b8-85da-02ae5a6ae057" (UID: "3b80572c-d607-43b8-85da-02ae5a6ae057"). InnerVolumeSpecName "kube-api-access-stb9m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:29:51 crc kubenswrapper[4728]: I1205 11:29:51.470341 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b80572c-d607-43b8-85da-02ae5a6ae057-scripts" (OuterVolumeSpecName: "scripts") pod "3b80572c-d607-43b8-85da-02ae5a6ae057" (UID: "3b80572c-d607-43b8-85da-02ae5a6ae057"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:29:51 crc kubenswrapper[4728]: I1205 11:29:51.472055 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b80572c-d607-43b8-85da-02ae5a6ae057-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "3b80572c-d607-43b8-85da-02ae5a6ae057" (UID: "3b80572c-d607-43b8-85da-02ae5a6ae057"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:29:51 crc kubenswrapper[4728]: I1205 11:29:51.474896 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b80572c-d607-43b8-85da-02ae5a6ae057-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "3b80572c-d607-43b8-85da-02ae5a6ae057" (UID: "3b80572c-d607-43b8-85da-02ae5a6ae057"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:29:51 crc kubenswrapper[4728]: I1205 11:29:51.491205 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b80572c-d607-43b8-85da-02ae5a6ae057-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3b80572c-d607-43b8-85da-02ae5a6ae057" (UID: "3b80572c-d607-43b8-85da-02ae5a6ae057"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:29:51 crc kubenswrapper[4728]: I1205 11:29:51.512950 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b80572c-d607-43b8-85da-02ae5a6ae057-config-data" (OuterVolumeSpecName: "config-data") pod "3b80572c-d607-43b8-85da-02ae5a6ae057" (UID: "3b80572c-d607-43b8-85da-02ae5a6ae057"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:29:51 crc kubenswrapper[4728]: I1205 11:29:51.565548 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-dlvvg" event={"ID":"3b80572c-d607-43b8-85da-02ae5a6ae057","Type":"ContainerDied","Data":"90b851b6108926ce9bcf8417dcfb1e157ce459fb2d006f9665a5357f8e99dc65"} Dec 05 11:29:51 crc kubenswrapper[4728]: I1205 11:29:51.565580 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="90b851b6108926ce9bcf8417dcfb1e157ce459fb2d006f9665a5357f8e99dc65" Dec 05 11:29:51 crc kubenswrapper[4728]: I1205 11:29:51.565626 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-dlvvg" Dec 05 11:29:51 crc kubenswrapper[4728]: I1205 11:29:51.568029 4728 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3b80572c-d607-43b8-85da-02ae5a6ae057-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:51 crc kubenswrapper[4728]: I1205 11:29:51.568060 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-stb9m\" (UniqueName: \"kubernetes.io/projected/3b80572c-d607-43b8-85da-02ae5a6ae057-kube-api-access-stb9m\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:51 crc kubenswrapper[4728]: I1205 11:29:51.568076 4728 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b80572c-d607-43b8-85da-02ae5a6ae057-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:51 crc kubenswrapper[4728]: I1205 11:29:51.568087 4728 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b80572c-d607-43b8-85da-02ae5a6ae057-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:51 crc kubenswrapper[4728]: I1205 11:29:51.568098 4728 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3b80572c-d607-43b8-85da-02ae5a6ae057-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:51 crc kubenswrapper[4728]: I1205 11:29:51.568108 4728 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3b80572c-d607-43b8-85da-02ae5a6ae057-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:52 crc kubenswrapper[4728]: I1205 11:29:52.573613 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-dlvvg"] Dec 05 11:29:52 crc kubenswrapper[4728]: I1205 11:29:52.578181 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-j552g" event={"ID":"9e792568-52ab-4080-bd08-8d6ef2f15ee7","Type":"ContainerDied","Data":"6f6bab616e0a23e5cd84cd8bb0dae54636ab9b44a626fc8159d63f858cca16ad"} Dec 05 11:29:52 crc kubenswrapper[4728]: I1205 11:29:52.578220 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6f6bab616e0a23e5cd84cd8bb0dae54636ab9b44a626fc8159d63f858cca16ad" Dec 05 11:29:52 crc kubenswrapper[4728]: E1205 11:29:52.581821 4728 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified" Dec 05 11:29:52 crc kubenswrapper[4728]: E1205 11:29:52.581980 4728 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6wz7k,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-dtppr_openstack(875f6746-18ef-483c-bbb4-80d7dbe4b1a1): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 11:29:52 crc kubenswrapper[4728]: E1205 11:29:52.583211 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-dtppr" podUID="875f6746-18ef-483c-bbb4-80d7dbe4b1a1" Dec 05 11:29:52 crc kubenswrapper[4728]: I1205 11:29:52.586171 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-dlvvg"] Dec 05 11:29:52 crc kubenswrapper[4728]: I1205 11:29:52.620104 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-j552g" Dec 05 11:29:52 crc kubenswrapper[4728]: I1205 11:29:52.666445 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-27gkv"] Dec 05 11:29:52 crc kubenswrapper[4728]: E1205 11:29:52.667474 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b80572c-d607-43b8-85da-02ae5a6ae057" containerName="keystone-bootstrap" Dec 05 11:29:52 crc kubenswrapper[4728]: I1205 11:29:52.667495 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b80572c-d607-43b8-85da-02ae5a6ae057" containerName="keystone-bootstrap" Dec 05 11:29:52 crc kubenswrapper[4728]: E1205 11:29:52.667522 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e792568-52ab-4080-bd08-8d6ef2f15ee7" containerName="neutron-db-sync" Dec 05 11:29:52 crc kubenswrapper[4728]: I1205 11:29:52.667553 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e792568-52ab-4080-bd08-8d6ef2f15ee7" containerName="neutron-db-sync" Dec 05 11:29:52 crc kubenswrapper[4728]: I1205 11:29:52.667907 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e792568-52ab-4080-bd08-8d6ef2f15ee7" containerName="neutron-db-sync" Dec 05 11:29:52 crc kubenswrapper[4728]: I1205 11:29:52.667926 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="3b80572c-d607-43b8-85da-02ae5a6ae057" containerName="keystone-bootstrap" Dec 05 11:29:52 crc kubenswrapper[4728]: I1205 11:29:52.673061 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-27gkv" Dec 05 11:29:52 crc kubenswrapper[4728]: I1205 11:29:52.678974 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-27gkv"] Dec 05 11:29:52 crc kubenswrapper[4728]: I1205 11:29:52.679282 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 05 11:29:52 crc kubenswrapper[4728]: I1205 11:29:52.679351 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 05 11:29:52 crc kubenswrapper[4728]: I1205 11:29:52.679376 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 05 11:29:52 crc kubenswrapper[4728]: I1205 11:29:52.679407 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 05 11:29:52 crc kubenswrapper[4728]: I1205 11:29:52.679514 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-rz6qv" Dec 05 11:29:52 crc kubenswrapper[4728]: I1205 11:29:52.692716 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/9e792568-52ab-4080-bd08-8d6ef2f15ee7-config\") pod \"9e792568-52ab-4080-bd08-8d6ef2f15ee7\" (UID: \"9e792568-52ab-4080-bd08-8d6ef2f15ee7\") " Dec 05 11:29:52 crc kubenswrapper[4728]: I1205 11:29:52.692937 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e792568-52ab-4080-bd08-8d6ef2f15ee7-combined-ca-bundle\") pod \"9e792568-52ab-4080-bd08-8d6ef2f15ee7\" (UID: \"9e792568-52ab-4080-bd08-8d6ef2f15ee7\") " Dec 05 11:29:52 crc kubenswrapper[4728]: I1205 11:29:52.693087 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wrj6f\" (UniqueName: \"kubernetes.io/projected/9e792568-52ab-4080-bd08-8d6ef2f15ee7-kube-api-access-wrj6f\") pod 
\"9e792568-52ab-4080-bd08-8d6ef2f15ee7\" (UID: \"9e792568-52ab-4080-bd08-8d6ef2f15ee7\") " Dec 05 11:29:52 crc kubenswrapper[4728]: I1205 11:29:52.701503 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e792568-52ab-4080-bd08-8d6ef2f15ee7-kube-api-access-wrj6f" (OuterVolumeSpecName: "kube-api-access-wrj6f") pod "9e792568-52ab-4080-bd08-8d6ef2f15ee7" (UID: "9e792568-52ab-4080-bd08-8d6ef2f15ee7"). InnerVolumeSpecName "kube-api-access-wrj6f". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:29:52 crc kubenswrapper[4728]: I1205 11:29:52.719435 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e792568-52ab-4080-bd08-8d6ef2f15ee7-config" (OuterVolumeSpecName: "config") pod "9e792568-52ab-4080-bd08-8d6ef2f15ee7" (UID: "9e792568-52ab-4080-bd08-8d6ef2f15ee7"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:29:52 crc kubenswrapper[4728]: I1205 11:29:52.721097 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e792568-52ab-4080-bd08-8d6ef2f15ee7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9e792568-52ab-4080-bd08-8d6ef2f15ee7" (UID: "9e792568-52ab-4080-bd08-8d6ef2f15ee7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:29:52 crc kubenswrapper[4728]: I1205 11:29:52.795350 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a984c5b-a1ab-47ab-9acd-998c05072ea1-combined-ca-bundle\") pod \"keystone-bootstrap-27gkv\" (UID: \"1a984c5b-a1ab-47ab-9acd-998c05072ea1\") " pod="openstack/keystone-bootstrap-27gkv" Dec 05 11:29:52 crc kubenswrapper[4728]: I1205 11:29:52.795393 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1a984c5b-a1ab-47ab-9acd-998c05072ea1-fernet-keys\") pod \"keystone-bootstrap-27gkv\" (UID: \"1a984c5b-a1ab-47ab-9acd-998c05072ea1\") " pod="openstack/keystone-bootstrap-27gkv" Dec 05 11:29:52 crc kubenswrapper[4728]: I1205 11:29:52.795421 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1a984c5b-a1ab-47ab-9acd-998c05072ea1-config-data\") pod \"keystone-bootstrap-27gkv\" (UID: \"1a984c5b-a1ab-47ab-9acd-998c05072ea1\") " pod="openstack/keystone-bootstrap-27gkv" Dec 05 11:29:52 crc kubenswrapper[4728]: I1205 11:29:52.795437 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6jwpb\" (UniqueName: \"kubernetes.io/projected/1a984c5b-a1ab-47ab-9acd-998c05072ea1-kube-api-access-6jwpb\") pod \"keystone-bootstrap-27gkv\" (UID: \"1a984c5b-a1ab-47ab-9acd-998c05072ea1\") " pod="openstack/keystone-bootstrap-27gkv" Dec 05 11:29:52 crc kubenswrapper[4728]: I1205 11:29:52.795458 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1a984c5b-a1ab-47ab-9acd-998c05072ea1-scripts\") pod \"keystone-bootstrap-27gkv\" (UID: \"1a984c5b-a1ab-47ab-9acd-998c05072ea1\") " pod="openstack/keystone-bootstrap-27gkv" Dec 05 11:29:52 crc kubenswrapper[4728]: I1205 11:29:52.795544 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1a984c5b-a1ab-47ab-9acd-998c05072ea1-credential-keys\") pod \"keystone-bootstrap-27gkv\" (UID: \"1a984c5b-a1ab-47ab-9acd-998c05072ea1\") " pod="openstack/keystone-bootstrap-27gkv" Dec 05 11:29:52 crc kubenswrapper[4728]: I1205 11:29:52.795947 4728 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e792568-52ab-4080-bd08-8d6ef2f15ee7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:52 crc kubenswrapper[4728]: I1205 11:29:52.795972 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wrj6f\" (UniqueName: \"kubernetes.io/projected/9e792568-52ab-4080-bd08-8d6ef2f15ee7-kube-api-access-wrj6f\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:52 crc kubenswrapper[4728]: I1205 11:29:52.795984 4728 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/9e792568-52ab-4080-bd08-8d6ef2f15ee7-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:52 crc kubenswrapper[4728]: I1205 11:29:52.897690 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a984c5b-a1ab-47ab-9acd-998c05072ea1-combined-ca-bundle\") pod \"keystone-bootstrap-27gkv\" (UID: \"1a984c5b-a1ab-47ab-9acd-998c05072ea1\") " pod="openstack/keystone-bootstrap-27gkv" Dec 05 11:29:52 crc kubenswrapper[4728]: I1205 11:29:52.897740 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1a984c5b-a1ab-47ab-9acd-998c05072ea1-fernet-keys\") pod \"keystone-bootstrap-27gkv\" (UID: \"1a984c5b-a1ab-47ab-9acd-998c05072ea1\") " pod="openstack/keystone-bootstrap-27gkv" Dec 05 11:29:52 crc kubenswrapper[4728]: I1205 11:29:52.897774 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1a984c5b-a1ab-47ab-9acd-998c05072ea1-config-data\") pod \"keystone-bootstrap-27gkv\" (UID: \"1a984c5b-a1ab-47ab-9acd-998c05072ea1\") " pod="openstack/keystone-bootstrap-27gkv" Dec 05 11:29:52 crc kubenswrapper[4728]: I1205 11:29:52.897809 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6jwpb\" (UniqueName: \"kubernetes.io/projected/1a984c5b-a1ab-47ab-9acd-998c05072ea1-kube-api-access-6jwpb\") pod \"keystone-bootstrap-27gkv\" (UID: \"1a984c5b-a1ab-47ab-9acd-998c05072ea1\") " pod="openstack/keystone-bootstrap-27gkv" Dec 05 11:29:52 crc kubenswrapper[4728]: I1205 11:29:52.897834 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1a984c5b-a1ab-47ab-9acd-998c05072ea1-scripts\") pod \"keystone-bootstrap-27gkv\" (UID: \"1a984c5b-a1ab-47ab-9acd-998c05072ea1\") " pod="openstack/keystone-bootstrap-27gkv" Dec 05 11:29:52 crc kubenswrapper[4728]: I1205 11:29:52.897859 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1a984c5b-a1ab-47ab-9acd-998c05072ea1-credential-keys\") pod \"keystone-bootstrap-27gkv\" (UID: \"1a984c5b-a1ab-47ab-9acd-998c05072ea1\") " pod="openstack/keystone-bootstrap-27gkv" Dec 05 11:29:52 crc kubenswrapper[4728]: I1205 11:29:52.902755 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/1a984c5b-a1ab-47ab-9acd-998c05072ea1-combined-ca-bundle\") pod \"keystone-bootstrap-27gkv\" (UID: \"1a984c5b-a1ab-47ab-9acd-998c05072ea1\") " pod="openstack/keystone-bootstrap-27gkv" Dec 05 11:29:52 crc kubenswrapper[4728]: I1205 11:29:52.903307 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1a984c5b-a1ab-47ab-9acd-998c05072ea1-credential-keys\") pod \"keystone-bootstrap-27gkv\" (UID: \"1a984c5b-a1ab-47ab-9acd-998c05072ea1\") " pod="openstack/keystone-bootstrap-27gkv" Dec 05 11:29:52 crc kubenswrapper[4728]: I1205 11:29:52.903599 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1a984c5b-a1ab-47ab-9acd-998c05072ea1-fernet-keys\") pod \"keystone-bootstrap-27gkv\" (UID: \"1a984c5b-a1ab-47ab-9acd-998c05072ea1\") " pod="openstack/keystone-bootstrap-27gkv" Dec 05 11:29:52 crc kubenswrapper[4728]: I1205 11:29:52.906532 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1a984c5b-a1ab-47ab-9acd-998c05072ea1-scripts\") pod \"keystone-bootstrap-27gkv\" (UID: \"1a984c5b-a1ab-47ab-9acd-998c05072ea1\") " pod="openstack/keystone-bootstrap-27gkv" Dec 05 11:29:52 crc kubenswrapper[4728]: I1205 11:29:52.912610 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1a984c5b-a1ab-47ab-9acd-998c05072ea1-config-data\") pod \"keystone-bootstrap-27gkv\" (UID: \"1a984c5b-a1ab-47ab-9acd-998c05072ea1\") " pod="openstack/keystone-bootstrap-27gkv" Dec 05 11:29:52 crc kubenswrapper[4728]: I1205 11:29:52.913724 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6jwpb\" (UniqueName: \"kubernetes.io/projected/1a984c5b-a1ab-47ab-9acd-998c05072ea1-kube-api-access-6jwpb\") pod \"keystone-bootstrap-27gkv\" (UID: \"1a984c5b-a1ab-47ab-9acd-998c05072ea1\") " pod="openstack/keystone-bootstrap-27gkv" Dec 05 11:29:52 crc kubenswrapper[4728]: I1205 11:29:52.996199 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-27gkv" Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.386767 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-847bd6b965-vxxz2" Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.397526 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-766d597b85-nkvzq" Dec 05 11:29:53 crc kubenswrapper[4728]: E1205 11:29:53.403107 4728 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-manila-api:current-podified" Dec 05 11:29:53 crc kubenswrapper[4728]: E1205 11:29:53.403283 4728 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manila-db-sync,Image:quay.io/podified-antelope-centos9/openstack-manila-api:current-podified,Command:[/bin/bash],Args:[-c sleep 0 && /usr/bin/manila-manage --config-dir /etc/manila/manila.conf.d db sync],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:job-config-data,ReadOnly:true,MountPath:/etc/manila/manila.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-td8b6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42429,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:*42429,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod manila-db-sync-zbbv6_openstack(537c7276-c2c9-4427-9b2b-5e835e3bc2d7): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 11:29:53 crc kubenswrapper[4728]: E1205 11:29:53.404725 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manila-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/manila-db-sync-zbbv6" podUID="537c7276-c2c9-4427-9b2b-5e835e3bc2d7" Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.406300 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/55ead1aa-62ac-4873-9d68-d409a2823f8c-horizon-secret-key\") pod \"55ead1aa-62ac-4873-9d68-d409a2823f8c\" (UID: \"55ead1aa-62ac-4873-9d68-d409a2823f8c\") " Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.406406 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/55ead1aa-62ac-4873-9d68-d409a2823f8c-scripts\") pod 
\"55ead1aa-62ac-4873-9d68-d409a2823f8c\" (UID: \"55ead1aa-62ac-4873-9d68-d409a2823f8c\") " Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.406536 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rr8mw\" (UniqueName: \"kubernetes.io/projected/55ead1aa-62ac-4873-9d68-d409a2823f8c-kube-api-access-rr8mw\") pod \"55ead1aa-62ac-4873-9d68-d409a2823f8c\" (UID: \"55ead1aa-62ac-4873-9d68-d409a2823f8c\") " Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.406665 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/55ead1aa-62ac-4873-9d68-d409a2823f8c-config-data\") pod \"55ead1aa-62ac-4873-9d68-d409a2823f8c\" (UID: \"55ead1aa-62ac-4873-9d68-d409a2823f8c\") " Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.406773 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/55ead1aa-62ac-4873-9d68-d409a2823f8c-logs\") pod \"55ead1aa-62ac-4873-9d68-d409a2823f8c\" (UID: \"55ead1aa-62ac-4873-9d68-d409a2823f8c\") " Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.406990 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/55ead1aa-62ac-4873-9d68-d409a2823f8c-scripts" (OuterVolumeSpecName: "scripts") pod "55ead1aa-62ac-4873-9d68-d409a2823f8c" (UID: "55ead1aa-62ac-4873-9d68-d409a2823f8c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.407201 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/55ead1aa-62ac-4873-9d68-d409a2823f8c-logs" (OuterVolumeSpecName: "logs") pod "55ead1aa-62ac-4873-9d68-d409a2823f8c" (UID: "55ead1aa-62ac-4873-9d68-d409a2823f8c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.407323 4728 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/55ead1aa-62ac-4873-9d68-d409a2823f8c-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.407382 4728 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/55ead1aa-62ac-4873-9d68-d409a2823f8c-logs\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.407690 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/55ead1aa-62ac-4873-9d68-d409a2823f8c-config-data" (OuterVolumeSpecName: "config-data") pod "55ead1aa-62ac-4873-9d68-d409a2823f8c" (UID: "55ead1aa-62ac-4873-9d68-d409a2823f8c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.409149 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6d7b85cb69-vxpnj" Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.413687 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/55ead1aa-62ac-4873-9d68-d409a2823f8c-kube-api-access-rr8mw" (OuterVolumeSpecName: "kube-api-access-rr8mw") pod "55ead1aa-62ac-4873-9d68-d409a2823f8c" (UID: "55ead1aa-62ac-4873-9d68-d409a2823f8c"). InnerVolumeSpecName "kube-api-access-rr8mw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.416751 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55ead1aa-62ac-4873-9d68-d409a2823f8c-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "55ead1aa-62ac-4873-9d68-d409a2823f8c" (UID: "55ead1aa-62ac-4873-9d68-d409a2823f8c"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.508851 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8f8jw\" (UniqueName: \"kubernetes.io/projected/eeb8f353-d0fc-4195-82c3-bff916a2ca01-kube-api-access-8f8jw\") pod \"eeb8f353-d0fc-4195-82c3-bff916a2ca01\" (UID: \"eeb8f353-d0fc-4195-82c3-bff916a2ca01\") " Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.509111 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/eeb8f353-d0fc-4195-82c3-bff916a2ca01-scripts\") pod \"eeb8f353-d0fc-4195-82c3-bff916a2ca01\" (UID: \"eeb8f353-d0fc-4195-82c3-bff916a2ca01\") " Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.509238 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bae2c0c5-184c-4c07-890e-2ba44bf39533-scripts\") pod \"bae2c0c5-184c-4c07-890e-2ba44bf39533\" (UID: \"bae2c0c5-184c-4c07-890e-2ba44bf39533\") " Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.509357 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/bae2c0c5-184c-4c07-890e-2ba44bf39533-config-data\") pod \"bae2c0c5-184c-4c07-890e-2ba44bf39533\" (UID: \"bae2c0c5-184c-4c07-890e-2ba44bf39533\") " Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.509444 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/bae2c0c5-184c-4c07-890e-2ba44bf39533-horizon-secret-key\") pod \"bae2c0c5-184c-4c07-890e-2ba44bf39533\" (UID: \"bae2c0c5-184c-4c07-890e-2ba44bf39533\") " Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.510421 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bae2c0c5-184c-4c07-890e-2ba44bf39533-logs\") pod \"bae2c0c5-184c-4c07-890e-2ba44bf39533\" (UID: \"bae2c0c5-184c-4c07-890e-2ba44bf39533\") " Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.510596 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/eeb8f353-d0fc-4195-82c3-bff916a2ca01-horizon-secret-key\") pod \"eeb8f353-d0fc-4195-82c3-bff916a2ca01\" (UID: \"eeb8f353-d0fc-4195-82c3-bff916a2ca01\") " Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.509920 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bae2c0c5-184c-4c07-890e-2ba44bf39533-scripts" (OuterVolumeSpecName: "scripts") pod "bae2c0c5-184c-4c07-890e-2ba44bf39533" (UID: "bae2c0c5-184c-4c07-890e-2ba44bf39533"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.510308 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eeb8f353-d0fc-4195-82c3-bff916a2ca01-scripts" (OuterVolumeSpecName: "scripts") pod "eeb8f353-d0fc-4195-82c3-bff916a2ca01" (UID: "eeb8f353-d0fc-4195-82c3-bff916a2ca01"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.510365 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bae2c0c5-184c-4c07-890e-2ba44bf39533-config-data" (OuterVolumeSpecName: "config-data") pod "bae2c0c5-184c-4c07-890e-2ba44bf39533" (UID: "bae2c0c5-184c-4c07-890e-2ba44bf39533"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.511058 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bae2c0c5-184c-4c07-890e-2ba44bf39533-logs" (OuterVolumeSpecName: "logs") pod "bae2c0c5-184c-4c07-890e-2ba44bf39533" (UID: "bae2c0c5-184c-4c07-890e-2ba44bf39533"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.511369 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eeb8f353-d0fc-4195-82c3-bff916a2ca01-logs\") pod \"eeb8f353-d0fc-4195-82c3-bff916a2ca01\" (UID: \"eeb8f353-d0fc-4195-82c3-bff916a2ca01\") " Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.511449 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/eeb8f353-d0fc-4195-82c3-bff916a2ca01-config-data\") pod \"eeb8f353-d0fc-4195-82c3-bff916a2ca01\" (UID: \"eeb8f353-d0fc-4195-82c3-bff916a2ca01\") " Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.512347 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qtkjw\" (UniqueName: \"kubernetes.io/projected/bae2c0c5-184c-4c07-890e-2ba44bf39533-kube-api-access-qtkjw\") pod \"bae2c0c5-184c-4c07-890e-2ba44bf39533\" (UID: \"bae2c0c5-184c-4c07-890e-2ba44bf39533\") " Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.511569 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eeb8f353-d0fc-4195-82c3-bff916a2ca01-logs" (OuterVolumeSpecName: "logs") pod "eeb8f353-d0fc-4195-82c3-bff916a2ca01" (UID: "eeb8f353-d0fc-4195-82c3-bff916a2ca01"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.512283 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eeb8f353-d0fc-4195-82c3-bff916a2ca01-config-data" (OuterVolumeSpecName: "config-data") pod "eeb8f353-d0fc-4195-82c3-bff916a2ca01" (UID: "eeb8f353-d0fc-4195-82c3-bff916a2ca01"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.513048 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rr8mw\" (UniqueName: \"kubernetes.io/projected/55ead1aa-62ac-4873-9d68-d409a2823f8c-kube-api-access-rr8mw\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.513126 4728 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eeb8f353-d0fc-4195-82c3-bff916a2ca01-logs\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.513194 4728 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/eeb8f353-d0fc-4195-82c3-bff916a2ca01-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.514125 4728 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/55ead1aa-62ac-4873-9d68-d409a2823f8c-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.514215 4728 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/eeb8f353-d0fc-4195-82c3-bff916a2ca01-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.514272 4728 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bae2c0c5-184c-4c07-890e-2ba44bf39533-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.514345 4728 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/bae2c0c5-184c-4c07-890e-2ba44bf39533-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.514414 4728 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bae2c0c5-184c-4c07-890e-2ba44bf39533-logs\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.514468 4728 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/55ead1aa-62ac-4873-9d68-d409a2823f8c-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.513126 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bae2c0c5-184c-4c07-890e-2ba44bf39533-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "bae2c0c5-184c-4c07-890e-2ba44bf39533" (UID: "bae2c0c5-184c-4c07-890e-2ba44bf39533"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.514097 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eeb8f353-d0fc-4195-82c3-bff916a2ca01-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "eeb8f353-d0fc-4195-82c3-bff916a2ca01" (UID: "eeb8f353-d0fc-4195-82c3-bff916a2ca01"). InnerVolumeSpecName "horizon-secret-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.515006 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eeb8f353-d0fc-4195-82c3-bff916a2ca01-kube-api-access-8f8jw" (OuterVolumeSpecName: "kube-api-access-8f8jw") pod "eeb8f353-d0fc-4195-82c3-bff916a2ca01" (UID: "eeb8f353-d0fc-4195-82c3-bff916a2ca01"). InnerVolumeSpecName "kube-api-access-8f8jw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.515368 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bae2c0c5-184c-4c07-890e-2ba44bf39533-kube-api-access-qtkjw" (OuterVolumeSpecName: "kube-api-access-qtkjw") pod "bae2c0c5-184c-4c07-890e-2ba44bf39533" (UID: "bae2c0c5-184c-4c07-890e-2ba44bf39533"). InnerVolumeSpecName "kube-api-access-qtkjw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.593996 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-847bd6b965-vxxz2" Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.593989 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-847bd6b965-vxxz2" event={"ID":"55ead1aa-62ac-4873-9d68-d409a2823f8c","Type":"ContainerDied","Data":"bb173083beb298112343bb179410eab5131657cba009c1f1b448381b90aeffc7"} Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.597164 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6d7b85cb69-vxpnj" event={"ID":"eeb8f353-d0fc-4195-82c3-bff916a2ca01","Type":"ContainerDied","Data":"9d653e1f6b8ebd81739550d6006a04a63c7a6f9c8603dd456525002b2b1be14b"} Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.597403 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6d7b85cb69-vxpnj" Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.606222 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-766d597b85-nkvzq" event={"ID":"bae2c0c5-184c-4c07-890e-2ba44bf39533","Type":"ContainerDied","Data":"cdb86fb9b05eee2c1b69edb904c403537549a9612d19d21c0ffe047503c22759"} Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.606405 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-766d597b85-nkvzq" Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.607434 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-j552g" Dec 05 11:29:53 crc kubenswrapper[4728]: E1205 11:29:53.609688 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-dtppr" podUID="875f6746-18ef-483c-bbb4-80d7dbe4b1a1" Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.616171 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qtkjw\" (UniqueName: \"kubernetes.io/projected/bae2c0c5-184c-4c07-890e-2ba44bf39533-kube-api-access-qtkjw\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.616206 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8f8jw\" (UniqueName: \"kubernetes.io/projected/eeb8f353-d0fc-4195-82c3-bff916a2ca01-kube-api-access-8f8jw\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.616216 4728 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/bae2c0c5-184c-4c07-890e-2ba44bf39533-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.616224 4728 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/eeb8f353-d0fc-4195-82c3-bff916a2ca01-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:53 crc kubenswrapper[4728]: E1205 11:29:53.618677 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manila-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-manila-api:current-podified\\\"\"" pod="openstack/manila-db-sync-zbbv6" podUID="537c7276-c2c9-4427-9b2b-5e835e3bc2d7" Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.716135 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-847bd6b965-vxxz2"] Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.728750 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-847bd6b965-vxxz2"] Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.746926 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-766d597b85-nkvzq"] Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.766160 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-766d597b85-nkvzq"] Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.810850 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6d7b85cb69-vxpnj"] Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.823619 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-6d7b85cb69-vxpnj"] Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.940821 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-6pvrj"] Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.942172 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-55f844cf75-6pvrj" Dec 05 11:29:53 crc kubenswrapper[4728]: I1205 11:29:53.973286 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-6pvrj"] Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.027631 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/289975c4-8dcc-4318-8c23-5acee4caa8bd-config\") pod \"dnsmasq-dns-55f844cf75-6pvrj\" (UID: \"289975c4-8dcc-4318-8c23-5acee4caa8bd\") " pod="openstack/dnsmasq-dns-55f844cf75-6pvrj" Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.027706 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wkfpk\" (UniqueName: \"kubernetes.io/projected/289975c4-8dcc-4318-8c23-5acee4caa8bd-kube-api-access-wkfpk\") pod \"dnsmasq-dns-55f844cf75-6pvrj\" (UID: \"289975c4-8dcc-4318-8c23-5acee4caa8bd\") " pod="openstack/dnsmasq-dns-55f844cf75-6pvrj" Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.027741 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/289975c4-8dcc-4318-8c23-5acee4caa8bd-dns-svc\") pod \"dnsmasq-dns-55f844cf75-6pvrj\" (UID: \"289975c4-8dcc-4318-8c23-5acee4caa8bd\") " pod="openstack/dnsmasq-dns-55f844cf75-6pvrj" Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.027764 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/289975c4-8dcc-4318-8c23-5acee4caa8bd-ovsdbserver-nb\") pod \"dnsmasq-dns-55f844cf75-6pvrj\" (UID: \"289975c4-8dcc-4318-8c23-5acee4caa8bd\") " pod="openstack/dnsmasq-dns-55f844cf75-6pvrj" Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.027839 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/289975c4-8dcc-4318-8c23-5acee4caa8bd-dns-swift-storage-0\") pod \"dnsmasq-dns-55f844cf75-6pvrj\" (UID: \"289975c4-8dcc-4318-8c23-5acee4caa8bd\") " pod="openstack/dnsmasq-dns-55f844cf75-6pvrj" Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.027882 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/289975c4-8dcc-4318-8c23-5acee4caa8bd-ovsdbserver-sb\") pod \"dnsmasq-dns-55f844cf75-6pvrj\" (UID: \"289975c4-8dcc-4318-8c23-5acee4caa8bd\") " pod="openstack/dnsmasq-dns-55f844cf75-6pvrj" Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.033577 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-76476c596-dh9zg"] Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.034956 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-76476c596-dh9zg" Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.041487 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.041775 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-656mz" Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.042092 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.042250 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.071160 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-76476c596-dh9zg"] Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.128902 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wkfpk\" (UniqueName: \"kubernetes.io/projected/289975c4-8dcc-4318-8c23-5acee4caa8bd-kube-api-access-wkfpk\") pod \"dnsmasq-dns-55f844cf75-6pvrj\" (UID: \"289975c4-8dcc-4318-8c23-5acee4caa8bd\") " pod="openstack/dnsmasq-dns-55f844cf75-6pvrj" Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.128952 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/289975c4-8dcc-4318-8c23-5acee4caa8bd-dns-svc\") pod \"dnsmasq-dns-55f844cf75-6pvrj\" (UID: \"289975c4-8dcc-4318-8c23-5acee4caa8bd\") " pod="openstack/dnsmasq-dns-55f844cf75-6pvrj" Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.128972 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/289975c4-8dcc-4318-8c23-5acee4caa8bd-ovsdbserver-nb\") pod \"dnsmasq-dns-55f844cf75-6pvrj\" (UID: \"289975c4-8dcc-4318-8c23-5acee4caa8bd\") " pod="openstack/dnsmasq-dns-55f844cf75-6pvrj" Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.129015 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/289975c4-8dcc-4318-8c23-5acee4caa8bd-dns-swift-storage-0\") pod \"dnsmasq-dns-55f844cf75-6pvrj\" (UID: \"289975c4-8dcc-4318-8c23-5acee4caa8bd\") " pod="openstack/dnsmasq-dns-55f844cf75-6pvrj" Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.129059 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/289975c4-8dcc-4318-8c23-5acee4caa8bd-ovsdbserver-sb\") pod \"dnsmasq-dns-55f844cf75-6pvrj\" (UID: \"289975c4-8dcc-4318-8c23-5acee4caa8bd\") " pod="openstack/dnsmasq-dns-55f844cf75-6pvrj" Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.129142 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/289975c4-8dcc-4318-8c23-5acee4caa8bd-config\") pod \"dnsmasq-dns-55f844cf75-6pvrj\" (UID: \"289975c4-8dcc-4318-8c23-5acee4caa8bd\") " pod="openstack/dnsmasq-dns-55f844cf75-6pvrj" Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.130014 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/289975c4-8dcc-4318-8c23-5acee4caa8bd-config\") pod \"dnsmasq-dns-55f844cf75-6pvrj\" (UID: \"289975c4-8dcc-4318-8c23-5acee4caa8bd\") " 
pod="openstack/dnsmasq-dns-55f844cf75-6pvrj" Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.131053 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/289975c4-8dcc-4318-8c23-5acee4caa8bd-dns-svc\") pod \"dnsmasq-dns-55f844cf75-6pvrj\" (UID: \"289975c4-8dcc-4318-8c23-5acee4caa8bd\") " pod="openstack/dnsmasq-dns-55f844cf75-6pvrj" Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.131538 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/289975c4-8dcc-4318-8c23-5acee4caa8bd-ovsdbserver-nb\") pod \"dnsmasq-dns-55f844cf75-6pvrj\" (UID: \"289975c4-8dcc-4318-8c23-5acee4caa8bd\") " pod="openstack/dnsmasq-dns-55f844cf75-6pvrj" Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.132030 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/289975c4-8dcc-4318-8c23-5acee4caa8bd-dns-swift-storage-0\") pod \"dnsmasq-dns-55f844cf75-6pvrj\" (UID: \"289975c4-8dcc-4318-8c23-5acee4caa8bd\") " pod="openstack/dnsmasq-dns-55f844cf75-6pvrj" Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.132555 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/289975c4-8dcc-4318-8c23-5acee4caa8bd-ovsdbserver-sb\") pod \"dnsmasq-dns-55f844cf75-6pvrj\" (UID: \"289975c4-8dcc-4318-8c23-5acee4caa8bd\") " pod="openstack/dnsmasq-dns-55f844cf75-6pvrj" Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.168483 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wkfpk\" (UniqueName: \"kubernetes.io/projected/289975c4-8dcc-4318-8c23-5acee4caa8bd-kube-api-access-wkfpk\") pod \"dnsmasq-dns-55f844cf75-6pvrj\" (UID: \"289975c4-8dcc-4318-8c23-5acee4caa8bd\") " pod="openstack/dnsmasq-dns-55f844cf75-6pvrj" Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.231534 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tn7zj\" (UniqueName: \"kubernetes.io/projected/b2657bde-2b78-49b2-bb33-a085f5a42024-kube-api-access-tn7zj\") pod \"neutron-76476c596-dh9zg\" (UID: \"b2657bde-2b78-49b2-bb33-a085f5a42024\") " pod="openstack/neutron-76476c596-dh9zg" Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.231635 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2657bde-2b78-49b2-bb33-a085f5a42024-combined-ca-bundle\") pod \"neutron-76476c596-dh9zg\" (UID: \"b2657bde-2b78-49b2-bb33-a085f5a42024\") " pod="openstack/neutron-76476c596-dh9zg" Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.231675 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/b2657bde-2b78-49b2-bb33-a085f5a42024-httpd-config\") pod \"neutron-76476c596-dh9zg\" (UID: \"b2657bde-2b78-49b2-bb33-a085f5a42024\") " pod="openstack/neutron-76476c596-dh9zg" Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.231701 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b2657bde-2b78-49b2-bb33-a085f5a42024-config\") pod \"neutron-76476c596-dh9zg\" (UID: \"b2657bde-2b78-49b2-bb33-a085f5a42024\") " pod="openstack/neutron-76476c596-dh9zg" Dec 05 
11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.231760 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2657bde-2b78-49b2-bb33-a085f5a42024-ovndb-tls-certs\") pod \"neutron-76476c596-dh9zg\" (UID: \"b2657bde-2b78-49b2-bb33-a085f5a42024\") " pod="openstack/neutron-76476c596-dh9zg" Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.288614 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55f844cf75-6pvrj" Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.333893 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/b2657bde-2b78-49b2-bb33-a085f5a42024-httpd-config\") pod \"neutron-76476c596-dh9zg\" (UID: \"b2657bde-2b78-49b2-bb33-a085f5a42024\") " pod="openstack/neutron-76476c596-dh9zg" Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.333930 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b2657bde-2b78-49b2-bb33-a085f5a42024-config\") pod \"neutron-76476c596-dh9zg\" (UID: \"b2657bde-2b78-49b2-bb33-a085f5a42024\") " pod="openstack/neutron-76476c596-dh9zg" Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.334004 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2657bde-2b78-49b2-bb33-a085f5a42024-ovndb-tls-certs\") pod \"neutron-76476c596-dh9zg\" (UID: \"b2657bde-2b78-49b2-bb33-a085f5a42024\") " pod="openstack/neutron-76476c596-dh9zg" Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.334038 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tn7zj\" (UniqueName: \"kubernetes.io/projected/b2657bde-2b78-49b2-bb33-a085f5a42024-kube-api-access-tn7zj\") pod \"neutron-76476c596-dh9zg\" (UID: \"b2657bde-2b78-49b2-bb33-a085f5a42024\") " pod="openstack/neutron-76476c596-dh9zg" Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.334095 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2657bde-2b78-49b2-bb33-a085f5a42024-combined-ca-bundle\") pod \"neutron-76476c596-dh9zg\" (UID: \"b2657bde-2b78-49b2-bb33-a085f5a42024\") " pod="openstack/neutron-76476c596-dh9zg" Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.339557 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2657bde-2b78-49b2-bb33-a085f5a42024-combined-ca-bundle\") pod \"neutron-76476c596-dh9zg\" (UID: \"b2657bde-2b78-49b2-bb33-a085f5a42024\") " pod="openstack/neutron-76476c596-dh9zg" Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.339868 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/b2657bde-2b78-49b2-bb33-a085f5a42024-config\") pod \"neutron-76476c596-dh9zg\" (UID: \"b2657bde-2b78-49b2-bb33-a085f5a42024\") " pod="openstack/neutron-76476c596-dh9zg" Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.340464 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/b2657bde-2b78-49b2-bb33-a085f5a42024-httpd-config\") pod \"neutron-76476c596-dh9zg\" (UID: \"b2657bde-2b78-49b2-bb33-a085f5a42024\") " pod="openstack/neutron-76476c596-dh9zg" 
Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.352658 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tn7zj\" (UniqueName: \"kubernetes.io/projected/b2657bde-2b78-49b2-bb33-a085f5a42024-kube-api-access-tn7zj\") pod \"neutron-76476c596-dh9zg\" (UID: \"b2657bde-2b78-49b2-bb33-a085f5a42024\") " pod="openstack/neutron-76476c596-dh9zg" Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.363405 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2657bde-2b78-49b2-bb33-a085f5a42024-ovndb-tls-certs\") pod \"neutron-76476c596-dh9zg\" (UID: \"b2657bde-2b78-49b2-bb33-a085f5a42024\") " pod="openstack/neutron-76476c596-dh9zg" Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.366562 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3b80572c-d607-43b8-85da-02ae5a6ae057" path="/var/lib/kubelet/pods/3b80572c-d607-43b8-85da-02ae5a6ae057/volumes" Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.367454 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="55ead1aa-62ac-4873-9d68-d409a2823f8c" path="/var/lib/kubelet/pods/55ead1aa-62ac-4873-9d68-d409a2823f8c/volumes" Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.368001 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bae2c0c5-184c-4c07-890e-2ba44bf39533" path="/var/lib/kubelet/pods/bae2c0c5-184c-4c07-890e-2ba44bf39533/volumes" Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.368960 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eeb8f353-d0fc-4195-82c3-bff916a2ca01" path="/var/lib/kubelet/pods/eeb8f353-d0fc-4195-82c3-bff916a2ca01/volumes" Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.381287 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-76476c596-dh9zg" Dec 05 11:29:54 crc kubenswrapper[4728]: E1205 11:29:54.435962 4728 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified" Dec 05 11:29:54 crc kubenswrapper[4728]: E1205 11:29:54.436126 4728 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,Command:[/bin/bash],Args:[-c barbican-manage db upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-qkvlk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-fm2df_openstack(555e531f-162f-4097-ba36-53b6ddedd6d8): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 11:29:54 crc kubenswrapper[4728]: E1205 11:29:54.437581 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/barbican-db-sync-fm2df" podUID="555e531f-162f-4097-ba36-53b6ddedd6d8" Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.506633 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-8qq5x" Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.644676 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/41e42f3d-f948-4eb4-99cb-842f4e17c69c-ovsdbserver-sb\") pod \"41e42f3d-f948-4eb4-99cb-842f4e17c69c\" (UID: \"41e42f3d-f948-4eb4-99cb-842f4e17c69c\") " Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.644994 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-npnkh\" (UniqueName: \"kubernetes.io/projected/41e42f3d-f948-4eb4-99cb-842f4e17c69c-kube-api-access-npnkh\") pod \"41e42f3d-f948-4eb4-99cb-842f4e17c69c\" (UID: \"41e42f3d-f948-4eb4-99cb-842f4e17c69c\") " Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.645084 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/41e42f3d-f948-4eb4-99cb-842f4e17c69c-ovsdbserver-nb\") pod \"41e42f3d-f948-4eb4-99cb-842f4e17c69c\" (UID: \"41e42f3d-f948-4eb4-99cb-842f4e17c69c\") " Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.645173 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/41e42f3d-f948-4eb4-99cb-842f4e17c69c-dns-svc\") pod \"41e42f3d-f948-4eb4-99cb-842f4e17c69c\" (UID: \"41e42f3d-f948-4eb4-99cb-842f4e17c69c\") " Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.645193 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/41e42f3d-f948-4eb4-99cb-842f4e17c69c-config\") pod \"41e42f3d-f948-4eb4-99cb-842f4e17c69c\" (UID: \"41e42f3d-f948-4eb4-99cb-842f4e17c69c\") " Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.650443 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/41e42f3d-f948-4eb4-99cb-842f4e17c69c-kube-api-access-npnkh" (OuterVolumeSpecName: "kube-api-access-npnkh") pod "41e42f3d-f948-4eb4-99cb-842f4e17c69c" (UID: "41e42f3d-f948-4eb4-99cb-842f4e17c69c"). InnerVolumeSpecName "kube-api-access-npnkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.658206 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-8qq5x" Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.658661 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-8qq5x" event={"ID":"41e42f3d-f948-4eb4-99cb-842f4e17c69c","Type":"ContainerDied","Data":"74c1c3bb1d6fbaba5c2712c22502a0f5dc04c4161ea81d78b025b1f580a35442"} Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.658704 4728 scope.go:117] "RemoveContainer" containerID="20298d372b55710c9d6c93f1d2dc85ecfe33aa32e270c2a1172aa7a7c9d13321" Dec 05 11:29:54 crc kubenswrapper[4728]: E1205 11:29:54.669657 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified\\\"\"" pod="openstack/barbican-db-sync-fm2df" podUID="555e531f-162f-4097-ba36-53b6ddedd6d8" Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.752982 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-npnkh\" (UniqueName: \"kubernetes.io/projected/41e42f3d-f948-4eb4-99cb-842f4e17c69c-kube-api-access-npnkh\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.765202 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/41e42f3d-f948-4eb4-99cb-842f4e17c69c-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "41e42f3d-f948-4eb4-99cb-842f4e17c69c" (UID: "41e42f3d-f948-4eb4-99cb-842f4e17c69c"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.787893 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/41e42f3d-f948-4eb4-99cb-842f4e17c69c-config" (OuterVolumeSpecName: "config") pod "41e42f3d-f948-4eb4-99cb-842f4e17c69c" (UID: "41e42f3d-f948-4eb4-99cb-842f4e17c69c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.792659 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/41e42f3d-f948-4eb4-99cb-842f4e17c69c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "41e42f3d-f948-4eb4-99cb-842f4e17c69c" (UID: "41e42f3d-f948-4eb4-99cb-842f4e17c69c"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.800493 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/41e42f3d-f948-4eb4-99cb-842f4e17c69c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "41e42f3d-f948-4eb4-99cb-842f4e17c69c" (UID: "41e42f3d-f948-4eb4-99cb-842f4e17c69c"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.854434 4728 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/41e42f3d-f948-4eb4-99cb-842f4e17c69c-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.854470 4728 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/41e42f3d-f948-4eb4-99cb-842f4e17c69c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.854479 4728 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/41e42f3d-f948-4eb4-99cb-842f4e17c69c-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.854488 4728 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/41e42f3d-f948-4eb4-99cb-842f4e17c69c-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:54 crc kubenswrapper[4728]: I1205 11:29:54.925210 4728 scope.go:117] "RemoveContainer" containerID="0490cf24113cf1667977d9e6060029a15dd491d52411b265b412b902db5c3d1e" Dec 05 11:29:55 crc kubenswrapper[4728]: I1205 11:29:55.020287 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-8qq5x"] Dec 05 11:29:55 crc kubenswrapper[4728]: I1205 11:29:55.040805 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-698758b865-8qq5x"] Dec 05 11:29:55 crc kubenswrapper[4728]: I1205 11:29:55.049840 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5d7bdb6c68-cfbgd"] Dec 05 11:29:55 crc kubenswrapper[4728]: I1205 11:29:55.212072 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7755888bd8-shzsv"] Dec 05 11:29:55 crc kubenswrapper[4728]: I1205 11:29:55.304317 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-76476c596-dh9zg"] Dec 05 11:29:55 crc kubenswrapper[4728]: W1205 11:29:55.315309 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb2657bde_2b78_49b2_bb33_a085f5a42024.slice/crio-bada9909a6a41cb02ba67ceeafaecb0c6b4a6a02ce584400e5f5535421f40b41 WatchSource:0}: Error finding container bada9909a6a41cb02ba67ceeafaecb0c6b4a6a02ce584400e5f5535421f40b41: Status 404 returned error can't find the container with id bada9909a6a41cb02ba67ceeafaecb0c6b4a6a02ce584400e5f5535421f40b41 Dec 05 11:29:55 crc kubenswrapper[4728]: I1205 11:29:55.350765 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-6pvrj"] Dec 05 11:29:55 crc kubenswrapper[4728]: I1205 11:29:55.371085 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-27gkv"] Dec 05 11:29:55 crc kubenswrapper[4728]: W1205 11:29:55.375576 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod289975c4_8dcc_4318_8c23_5acee4caa8bd.slice/crio-a66a9b305ca0855f3cf3c75cb0bd28bd4cc70385bd6da3db9ebb3b7d7921af99 WatchSource:0}: Error finding container a66a9b305ca0855f3cf3c75cb0bd28bd4cc70385bd6da3db9ebb3b7d7921af99: Status 404 returned error can't find the container with id a66a9b305ca0855f3cf3c75cb0bd28bd4cc70385bd6da3db9ebb3b7d7921af99 Dec 05 11:29:55 crc kubenswrapper[4728]: I1205 11:29:55.669698 
4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-sg5mr" event={"ID":"198c5a44-d3b2-4afd-b034-d898309e0f42","Type":"ContainerStarted","Data":"1ca97bc11021d729f13692aab870d70e82f18e3fa12ecc24a41950063813b560"} Dec 05 11:29:55 crc kubenswrapper[4728]: I1205 11:29:55.677790 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-76476c596-dh9zg" event={"ID":"b2657bde-2b78-49b2-bb33-a085f5a42024","Type":"ContainerStarted","Data":"bada9909a6a41cb02ba67ceeafaecb0c6b4a6a02ce584400e5f5535421f40b41"} Dec 05 11:29:55 crc kubenswrapper[4728]: I1205 11:29:55.680452 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-27gkv" event={"ID":"1a984c5b-a1ab-47ab-9acd-998c05072ea1","Type":"ContainerStarted","Data":"9b759cbb9491aaceff3419835e53693db3728ea44a6e24ed7238930655d04bac"} Dec 05 11:29:55 crc kubenswrapper[4728]: I1205 11:29:55.681295 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f844cf75-6pvrj" event={"ID":"289975c4-8dcc-4318-8c23-5acee4caa8bd","Type":"ContainerStarted","Data":"a66a9b305ca0855f3cf3c75cb0bd28bd4cc70385bd6da3db9ebb3b7d7921af99"} Dec 05 11:29:55 crc kubenswrapper[4728]: I1205 11:29:55.682478 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dbac4a04-13e4-43e5-a221-3faec9e9fac7","Type":"ContainerStarted","Data":"0e7730282b9724a9f5acf4019d6f17483c6d6fbe81837bb2dda34843238a1e8d"} Dec 05 11:29:55 crc kubenswrapper[4728]: I1205 11:29:55.683752 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5d7bdb6c68-cfbgd" event={"ID":"7c54bdd7-0427-403e-a33d-2d52ec56a7fc","Type":"ContainerStarted","Data":"26204862c5bf40d04605923ab2aea195411bd311bb0d38fc57ec4f8156e1de7d"} Dec 05 11:29:55 crc kubenswrapper[4728]: I1205 11:29:55.684702 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7755888bd8-shzsv" event={"ID":"841ca27f-0486-413e-975b-4f51b008883a","Type":"ContainerStarted","Data":"4ffd07f08099f9db762883c394cf1a5981093b2fd9514f1dddf4d4d76e5c4647"} Dec 05 11:29:55 crc kubenswrapper[4728]: I1205 11:29:55.699222 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-sg5mr" podStartSLOduration=4.475018601 podStartE2EDuration="39.699207789s" podCreationTimestamp="2025-12-05 11:29:16 +0000 UTC" firstStartedPulling="2025-12-05 11:29:18.153829789 +0000 UTC m=+1292.295952482" lastFinishedPulling="2025-12-05 11:29:53.378018977 +0000 UTC m=+1327.520141670" observedRunningTime="2025-12-05 11:29:55.694441993 +0000 UTC m=+1329.836564706" watchObservedRunningTime="2025-12-05 11:29:55.699207789 +0000 UTC m=+1329.841330482" Dec 05 11:29:56 crc kubenswrapper[4728]: I1205 11:29:56.126638 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-5cf4cb67d5-pxwtj"] Dec 05 11:29:56 crc kubenswrapper[4728]: E1205 11:29:56.127415 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41e42f3d-f948-4eb4-99cb-842f4e17c69c" containerName="init" Dec 05 11:29:56 crc kubenswrapper[4728]: I1205 11:29:56.127449 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="41e42f3d-f948-4eb4-99cb-842f4e17c69c" containerName="init" Dec 05 11:29:56 crc kubenswrapper[4728]: E1205 11:29:56.127461 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41e42f3d-f948-4eb4-99cb-842f4e17c69c" containerName="dnsmasq-dns" Dec 05 11:29:56 crc kubenswrapper[4728]: I1205 11:29:56.127468 4728 state_mem.go:107] "Deleted 
CPUSet assignment" podUID="41e42f3d-f948-4eb4-99cb-842f4e17c69c" containerName="dnsmasq-dns" Dec 05 11:29:56 crc kubenswrapper[4728]: I1205 11:29:56.127752 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="41e42f3d-f948-4eb4-99cb-842f4e17c69c" containerName="dnsmasq-dns" Dec 05 11:29:56 crc kubenswrapper[4728]: I1205 11:29:56.129178 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5cf4cb67d5-pxwtj" Dec 05 11:29:56 crc kubenswrapper[4728]: I1205 11:29:56.131811 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Dec 05 11:29:56 crc kubenswrapper[4728]: I1205 11:29:56.132727 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Dec 05 11:29:56 crc kubenswrapper[4728]: I1205 11:29:56.152971 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5cf4cb67d5-pxwtj"] Dec 05 11:29:56 crc kubenswrapper[4728]: I1205 11:29:56.287332 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/9a26b328-f443-4f9a-a2ae-2042e3189096-config\") pod \"neutron-5cf4cb67d5-pxwtj\" (UID: \"9a26b328-f443-4f9a-a2ae-2042e3189096\") " pod="openstack/neutron-5cf4cb67d5-pxwtj" Dec 05 11:29:56 crc kubenswrapper[4728]: I1205 11:29:56.287380 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a26b328-f443-4f9a-a2ae-2042e3189096-combined-ca-bundle\") pod \"neutron-5cf4cb67d5-pxwtj\" (UID: \"9a26b328-f443-4f9a-a2ae-2042e3189096\") " pod="openstack/neutron-5cf4cb67d5-pxwtj" Dec 05 11:29:56 crc kubenswrapper[4728]: I1205 11:29:56.287399 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9a26b328-f443-4f9a-a2ae-2042e3189096-internal-tls-certs\") pod \"neutron-5cf4cb67d5-pxwtj\" (UID: \"9a26b328-f443-4f9a-a2ae-2042e3189096\") " pod="openstack/neutron-5cf4cb67d5-pxwtj" Dec 05 11:29:56 crc kubenswrapper[4728]: I1205 11:29:56.287424 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9a26b328-f443-4f9a-a2ae-2042e3189096-ovndb-tls-certs\") pod \"neutron-5cf4cb67d5-pxwtj\" (UID: \"9a26b328-f443-4f9a-a2ae-2042e3189096\") " pod="openstack/neutron-5cf4cb67d5-pxwtj" Dec 05 11:29:56 crc kubenswrapper[4728]: I1205 11:29:56.287449 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/9a26b328-f443-4f9a-a2ae-2042e3189096-httpd-config\") pod \"neutron-5cf4cb67d5-pxwtj\" (UID: \"9a26b328-f443-4f9a-a2ae-2042e3189096\") " pod="openstack/neutron-5cf4cb67d5-pxwtj" Dec 05 11:29:56 crc kubenswrapper[4728]: I1205 11:29:56.287472 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gh6c8\" (UniqueName: \"kubernetes.io/projected/9a26b328-f443-4f9a-a2ae-2042e3189096-kube-api-access-gh6c8\") pod \"neutron-5cf4cb67d5-pxwtj\" (UID: \"9a26b328-f443-4f9a-a2ae-2042e3189096\") " pod="openstack/neutron-5cf4cb67d5-pxwtj" Dec 05 11:29:56 crc kubenswrapper[4728]: I1205 11:29:56.287506 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" 
(UniqueName: \"kubernetes.io/secret/9a26b328-f443-4f9a-a2ae-2042e3189096-public-tls-certs\") pod \"neutron-5cf4cb67d5-pxwtj\" (UID: \"9a26b328-f443-4f9a-a2ae-2042e3189096\") " pod="openstack/neutron-5cf4cb67d5-pxwtj" Dec 05 11:29:56 crc kubenswrapper[4728]: I1205 11:29:56.370442 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="41e42f3d-f948-4eb4-99cb-842f4e17c69c" path="/var/lib/kubelet/pods/41e42f3d-f948-4eb4-99cb-842f4e17c69c/volumes" Dec 05 11:29:56 crc kubenswrapper[4728]: I1205 11:29:56.388555 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/9a26b328-f443-4f9a-a2ae-2042e3189096-config\") pod \"neutron-5cf4cb67d5-pxwtj\" (UID: \"9a26b328-f443-4f9a-a2ae-2042e3189096\") " pod="openstack/neutron-5cf4cb67d5-pxwtj" Dec 05 11:29:56 crc kubenswrapper[4728]: I1205 11:29:56.388602 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a26b328-f443-4f9a-a2ae-2042e3189096-combined-ca-bundle\") pod \"neutron-5cf4cb67d5-pxwtj\" (UID: \"9a26b328-f443-4f9a-a2ae-2042e3189096\") " pod="openstack/neutron-5cf4cb67d5-pxwtj" Dec 05 11:29:56 crc kubenswrapper[4728]: I1205 11:29:56.388623 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9a26b328-f443-4f9a-a2ae-2042e3189096-internal-tls-certs\") pod \"neutron-5cf4cb67d5-pxwtj\" (UID: \"9a26b328-f443-4f9a-a2ae-2042e3189096\") " pod="openstack/neutron-5cf4cb67d5-pxwtj" Dec 05 11:29:56 crc kubenswrapper[4728]: I1205 11:29:56.388638 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9a26b328-f443-4f9a-a2ae-2042e3189096-ovndb-tls-certs\") pod \"neutron-5cf4cb67d5-pxwtj\" (UID: \"9a26b328-f443-4f9a-a2ae-2042e3189096\") " pod="openstack/neutron-5cf4cb67d5-pxwtj" Dec 05 11:29:56 crc kubenswrapper[4728]: I1205 11:29:56.388661 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/9a26b328-f443-4f9a-a2ae-2042e3189096-httpd-config\") pod \"neutron-5cf4cb67d5-pxwtj\" (UID: \"9a26b328-f443-4f9a-a2ae-2042e3189096\") " pod="openstack/neutron-5cf4cb67d5-pxwtj" Dec 05 11:29:56 crc kubenswrapper[4728]: I1205 11:29:56.388678 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gh6c8\" (UniqueName: \"kubernetes.io/projected/9a26b328-f443-4f9a-a2ae-2042e3189096-kube-api-access-gh6c8\") pod \"neutron-5cf4cb67d5-pxwtj\" (UID: \"9a26b328-f443-4f9a-a2ae-2042e3189096\") " pod="openstack/neutron-5cf4cb67d5-pxwtj" Dec 05 11:29:56 crc kubenswrapper[4728]: I1205 11:29:56.388710 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9a26b328-f443-4f9a-a2ae-2042e3189096-public-tls-certs\") pod \"neutron-5cf4cb67d5-pxwtj\" (UID: \"9a26b328-f443-4f9a-a2ae-2042e3189096\") " pod="openstack/neutron-5cf4cb67d5-pxwtj" Dec 05 11:29:56 crc kubenswrapper[4728]: I1205 11:29:56.397484 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/9a26b328-f443-4f9a-a2ae-2042e3189096-config\") pod \"neutron-5cf4cb67d5-pxwtj\" (UID: \"9a26b328-f443-4f9a-a2ae-2042e3189096\") " pod="openstack/neutron-5cf4cb67d5-pxwtj" Dec 05 11:29:56 crc kubenswrapper[4728]: I1205 11:29:56.397490 4728 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/9a26b328-f443-4f9a-a2ae-2042e3189096-httpd-config\") pod \"neutron-5cf4cb67d5-pxwtj\" (UID: \"9a26b328-f443-4f9a-a2ae-2042e3189096\") " pod="openstack/neutron-5cf4cb67d5-pxwtj" Dec 05 11:29:56 crc kubenswrapper[4728]: I1205 11:29:56.397617 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9a26b328-f443-4f9a-a2ae-2042e3189096-ovndb-tls-certs\") pod \"neutron-5cf4cb67d5-pxwtj\" (UID: \"9a26b328-f443-4f9a-a2ae-2042e3189096\") " pod="openstack/neutron-5cf4cb67d5-pxwtj" Dec 05 11:29:56 crc kubenswrapper[4728]: I1205 11:29:56.398730 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a26b328-f443-4f9a-a2ae-2042e3189096-combined-ca-bundle\") pod \"neutron-5cf4cb67d5-pxwtj\" (UID: \"9a26b328-f443-4f9a-a2ae-2042e3189096\") " pod="openstack/neutron-5cf4cb67d5-pxwtj" Dec 05 11:29:56 crc kubenswrapper[4728]: I1205 11:29:56.399613 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9a26b328-f443-4f9a-a2ae-2042e3189096-public-tls-certs\") pod \"neutron-5cf4cb67d5-pxwtj\" (UID: \"9a26b328-f443-4f9a-a2ae-2042e3189096\") " pod="openstack/neutron-5cf4cb67d5-pxwtj" Dec 05 11:29:56 crc kubenswrapper[4728]: I1205 11:29:56.406615 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9a26b328-f443-4f9a-a2ae-2042e3189096-internal-tls-certs\") pod \"neutron-5cf4cb67d5-pxwtj\" (UID: \"9a26b328-f443-4f9a-a2ae-2042e3189096\") " pod="openstack/neutron-5cf4cb67d5-pxwtj" Dec 05 11:29:56 crc kubenswrapper[4728]: I1205 11:29:56.435522 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gh6c8\" (UniqueName: \"kubernetes.io/projected/9a26b328-f443-4f9a-a2ae-2042e3189096-kube-api-access-gh6c8\") pod \"neutron-5cf4cb67d5-pxwtj\" (UID: \"9a26b328-f443-4f9a-a2ae-2042e3189096\") " pod="openstack/neutron-5cf4cb67d5-pxwtj" Dec 05 11:29:56 crc kubenswrapper[4728]: I1205 11:29:56.454252 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-5cf4cb67d5-pxwtj" Dec 05 11:29:57 crc kubenswrapper[4728]: I1205 11:29:57.420786 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5cf4cb67d5-pxwtj"] Dec 05 11:29:57 crc kubenswrapper[4728]: W1205 11:29:57.432038 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9a26b328_f443_4f9a_a2ae_2042e3189096.slice/crio-f065da759df896b1585776435457d74680c36e685adf61cbb29941de651c9215 WatchSource:0}: Error finding container f065da759df896b1585776435457d74680c36e685adf61cbb29941de651c9215: Status 404 returned error can't find the container with id f065da759df896b1585776435457d74680c36e685adf61cbb29941de651c9215 Dec 05 11:29:57 crc kubenswrapper[4728]: I1205 11:29:57.704669 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5cf4cb67d5-pxwtj" event={"ID":"9a26b328-f443-4f9a-a2ae-2042e3189096","Type":"ContainerStarted","Data":"f065da759df896b1585776435457d74680c36e685adf61cbb29941de651c9215"} Dec 05 11:29:58 crc kubenswrapper[4728]: I1205 11:29:58.733255 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-76476c596-dh9zg" event={"ID":"b2657bde-2b78-49b2-bb33-a085f5a42024","Type":"ContainerStarted","Data":"a424612358c0338ec1134ced5d16490f2c4b85530adaef3d897eb3a80f1f2e77"} Dec 05 11:29:58 crc kubenswrapper[4728]: I1205 11:29:58.746776 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-27gkv" event={"ID":"1a984c5b-a1ab-47ab-9acd-998c05072ea1","Type":"ContainerStarted","Data":"b9647a262dee9f754f4071a8e491ac32fb1b5b744bbd889caa0be4f71a8b42c9"} Dec 05 11:29:58 crc kubenswrapper[4728]: I1205 11:29:58.755957 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f844cf75-6pvrj" event={"ID":"289975c4-8dcc-4318-8c23-5acee4caa8bd","Type":"ContainerStarted","Data":"6d2e6eb8ad80c42266a730bbedae37dc194334970df25a9af8f3ad9bc9bc3eb1"} Dec 05 11:29:58 crc kubenswrapper[4728]: I1205 11:29:58.763722 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5cf4cb67d5-pxwtj" event={"ID":"9a26b328-f443-4f9a-a2ae-2042e3189096","Type":"ContainerStarted","Data":"2a0fd43337ad50bf77cd0b212241f89e34815625b93b915db566e347f7c8c2ac"} Dec 05 11:29:58 crc kubenswrapper[4728]: I1205 11:29:58.775469 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="0f9de743-47da-494a-aad4-fa892a1c0677" containerName="glance-log" containerID="cri-o://31af6c97d793a61d5038ad8951802770a382d7510d3ef7aa9dfd1e85270a94c3" gracePeriod=30 Dec 05 11:29:58 crc kubenswrapper[4728]: I1205 11:29:58.775620 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="0f9de743-47da-494a-aad4-fa892a1c0677" containerName="glance-httpd" containerID="cri-o://faa91e64733ae46dc879bf9d1eb3e690ccbb52a5b65c17a0c3a7b1244b794705" gracePeriod=30 Dec 05 11:29:58 crc kubenswrapper[4728]: I1205 11:29:58.778298 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-27gkv" podStartSLOduration=6.778281056 podStartE2EDuration="6.778281056s" podCreationTimestamp="2025-12-05 11:29:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:29:58.771418834 +0000 UTC m=+1332.913541527" 
watchObservedRunningTime="2025-12-05 11:29:58.778281056 +0000 UTC m=+1332.920403749" Dec 05 11:29:58 crc kubenswrapper[4728]: I1205 11:29:58.787962 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104","Type":"ContainerStarted","Data":"082a056062ee2def1f1f0b0df2a3c1635d65404a1b798300bf8fa52eca4643cd"} Dec 05 11:29:58 crc kubenswrapper[4728]: I1205 11:29:58.788130 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104" containerName="glance-log" containerID="cri-o://62c155f1bca6b2511409e49f93e8dd3bbaa617dd604db2245aa174370aeb174e" gracePeriod=30 Dec 05 11:29:58 crc kubenswrapper[4728]: I1205 11:29:58.788248 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104" containerName="glance-httpd" containerID="cri-o://082a056062ee2def1f1f0b0df2a3c1635d65404a1b798300bf8fa52eca4643cd" gracePeriod=30 Dec 05 11:29:58 crc kubenswrapper[4728]: I1205 11:29:58.821200 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=37.821167132 podStartE2EDuration="37.821167132s" podCreationTimestamp="2025-12-05 11:29:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:29:58.815313207 +0000 UTC m=+1332.957435910" watchObservedRunningTime="2025-12-05 11:29:58.821167132 +0000 UTC m=+1332.963289825" Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.403232 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.428240 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=38.428068746 podStartE2EDuration="38.428068746s" podCreationTimestamp="2025-12-05 11:29:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:29:58.848018893 +0000 UTC m=+1332.990141596" watchObservedRunningTime="2025-12-05 11:29:59.428068746 +0000 UTC m=+1333.570191439" Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.430190 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.549337 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wgfkx\" (UniqueName: \"kubernetes.io/projected/0f9de743-47da-494a-aad4-fa892a1c0677-kube-api-access-wgfkx\") pod \"0f9de743-47da-494a-aad4-fa892a1c0677\" (UID: \"0f9de743-47da-494a-aad4-fa892a1c0677\") " Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.549408 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104-scripts\") pod \"643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104\" (UID: \"643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104\") " Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.549445 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/0f9de743-47da-494a-aad4-fa892a1c0677-ceph\") pod \"0f9de743-47da-494a-aad4-fa892a1c0677\" (UID: \"0f9de743-47da-494a-aad4-fa892a1c0677\") " Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.549472 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104-logs\") pod \"643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104\" (UID: \"643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104\") " Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.549526 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104-combined-ca-bundle\") pod \"643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104\" (UID: \"643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104\") " Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.549547 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"0f9de743-47da-494a-aad4-fa892a1c0677\" (UID: \"0f9de743-47da-494a-aad4-fa892a1c0677\") " Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.549619 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f9de743-47da-494a-aad4-fa892a1c0677-config-data\") pod \"0f9de743-47da-494a-aad4-fa892a1c0677\" (UID: \"0f9de743-47da-494a-aad4-fa892a1c0677\") " Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.549639 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0f9de743-47da-494a-aad4-fa892a1c0677-logs\") pod \"0f9de743-47da-494a-aad4-fa892a1c0677\" (UID: \"0f9de743-47da-494a-aad4-fa892a1c0677\") " Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.549661 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f9de743-47da-494a-aad4-fa892a1c0677-combined-ca-bundle\") pod \"0f9de743-47da-494a-aad4-fa892a1c0677\" (UID: \"0f9de743-47da-494a-aad4-fa892a1c0677\") " Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.549707 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6lxxt\" (UniqueName: \"kubernetes.io/projected/643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104-kube-api-access-6lxxt\") pod \"643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104\" (UID: \"643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104\") " Dec 05 11:29:59 crc 
kubenswrapper[4728]: I1205 11:29:59.549731 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104-ceph\") pod \"643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104\" (UID: \"643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104\") " Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.549754 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0f9de743-47da-494a-aad4-fa892a1c0677-scripts\") pod \"0f9de743-47da-494a-aad4-fa892a1c0677\" (UID: \"0f9de743-47da-494a-aad4-fa892a1c0677\") " Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.549786 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104\" (UID: \"643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104\") " Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.549840 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104-config-data\") pod \"643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104\" (UID: \"643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104\") " Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.549859 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104-httpd-run\") pod \"643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104\" (UID: \"643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104\") " Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.549877 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0f9de743-47da-494a-aad4-fa892a1c0677-httpd-run\") pod \"0f9de743-47da-494a-aad4-fa892a1c0677\" (UID: \"0f9de743-47da-494a-aad4-fa892a1c0677\") " Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.550661 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0f9de743-47da-494a-aad4-fa892a1c0677-logs" (OuterVolumeSpecName: "logs") pod "0f9de743-47da-494a-aad4-fa892a1c0677" (UID: "0f9de743-47da-494a-aad4-fa892a1c0677"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.551101 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104-logs" (OuterVolumeSpecName: "logs") pod "643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104" (UID: "643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.551118 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0f9de743-47da-494a-aad4-fa892a1c0677-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "0f9de743-47da-494a-aad4-fa892a1c0677" (UID: "0f9de743-47da-494a-aad4-fa892a1c0677"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.551497 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104" (UID: "643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.557643 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104-scripts" (OuterVolumeSpecName: "scripts") pod "643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104" (UID: "643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.562713 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "glance") pod "0f9de743-47da-494a-aad4-fa892a1c0677" (UID: "0f9de743-47da-494a-aad4-fa892a1c0677"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.562726 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0f9de743-47da-494a-aad4-fa892a1c0677-kube-api-access-wgfkx" (OuterVolumeSpecName: "kube-api-access-wgfkx") pod "0f9de743-47da-494a-aad4-fa892a1c0677" (UID: "0f9de743-47da-494a-aad4-fa892a1c0677"). InnerVolumeSpecName "kube-api-access-wgfkx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.568844 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "glance") pod "643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104" (UID: "643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.570112 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104-kube-api-access-6lxxt" (OuterVolumeSpecName: "kube-api-access-6lxxt") pod "643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104" (UID: "643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104"). InnerVolumeSpecName "kube-api-access-6lxxt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.573711 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f9de743-47da-494a-aad4-fa892a1c0677-scripts" (OuterVolumeSpecName: "scripts") pod "0f9de743-47da-494a-aad4-fa892a1c0677" (UID: "0f9de743-47da-494a-aad4-fa892a1c0677"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.578096 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104-ceph" (OuterVolumeSpecName: "ceph") pod "643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104" (UID: "643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104"). InnerVolumeSpecName "ceph". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.578290 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0f9de743-47da-494a-aad4-fa892a1c0677-ceph" (OuterVolumeSpecName: "ceph") pod "0f9de743-47da-494a-aad4-fa892a1c0677" (UID: "0f9de743-47da-494a-aad4-fa892a1c0677"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.609640 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104" (UID: "643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.625440 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f9de743-47da-494a-aad4-fa892a1c0677-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0f9de743-47da-494a-aad4-fa892a1c0677" (UID: "0f9de743-47da-494a-aad4-fa892a1c0677"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.627045 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104-config-data" (OuterVolumeSpecName: "config-data") pod "643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104" (UID: "643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.654771 4728 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.654853 4728 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.654866 4728 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0f9de743-47da-494a-aad4-fa892a1c0677-logs\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.654878 4728 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f9de743-47da-494a-aad4-fa892a1c0677-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.654902 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6lxxt\" (UniqueName: \"kubernetes.io/projected/643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104-kube-api-access-6lxxt\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.654915 4728 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104-ceph\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.654925 4728 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/0f9de743-47da-494a-aad4-fa892a1c0677-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.654940 4728 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.654952 4728 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.654962 4728 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.654971 4728 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0f9de743-47da-494a-aad4-fa892a1c0677-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.654980 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wgfkx\" (UniqueName: \"kubernetes.io/projected/0f9de743-47da-494a-aad4-fa892a1c0677-kube-api-access-wgfkx\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.654989 4728 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.655000 4728 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/0f9de743-47da-494a-aad4-fa892a1c0677-ceph\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.655014 4728 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104-logs\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.662389 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f9de743-47da-494a-aad4-fa892a1c0677-config-data" (OuterVolumeSpecName: "config-data") pod "0f9de743-47da-494a-aad4-fa892a1c0677" (UID: "0f9de743-47da-494a-aad4-fa892a1c0677"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.715556 4728 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.718098 4728 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.756934 4728 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.756969 4728 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f9de743-47da-494a-aad4-fa892a1c0677-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.756982 4728 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.803402 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5d7bdb6c68-cfbgd" event={"ID":"7c54bdd7-0427-403e-a33d-2d52ec56a7fc","Type":"ContainerStarted","Data":"854c78ab2e7ce12e906b1b302c9941632db6e57660fe1ac6ea8724721bd7b054"} Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.807457 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5cf4cb67d5-pxwtj" event={"ID":"9a26b328-f443-4f9a-a2ae-2042e3189096","Type":"ContainerStarted","Data":"67b01ecd9e35fcee3ab87096505de9d7768c774f657855d651650a07be52bb1c"} Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.807693 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-5cf4cb67d5-pxwtj" Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.826385 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7755888bd8-shzsv" event={"ID":"841ca27f-0486-413e-975b-4f51b008883a","Type":"ContainerStarted","Data":"748029f24fa5dc7ec6b0ef13e3f4a01e4b6d07f7495e48f95bf898ebcdd2c1ef"} Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.826429 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7755888bd8-shzsv" event={"ID":"841ca27f-0486-413e-975b-4f51b008883a","Type":"ContainerStarted","Data":"d9869d19c439dde2d64d3434635890f1f59d7886de4e6f094fbc9c44860d15ed"} Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.828383 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-5cf4cb67d5-pxwtj" podStartSLOduration=3.828365489 podStartE2EDuration="3.828365489s" podCreationTimestamp="2025-12-05 11:29:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:29:59.82727571 +0000 UTC m=+1333.969398403" watchObservedRunningTime="2025-12-05 11:29:59.828365489 +0000 UTC m=+1333.970488202" Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.833511 4728 generic.go:334] "Generic (PLEG): container finished" podID="0f9de743-47da-494a-aad4-fa892a1c0677" containerID="faa91e64733ae46dc879bf9d1eb3e690ccbb52a5b65c17a0c3a7b1244b794705" exitCode=143 Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 
11:29:59.833542 4728 generic.go:334] "Generic (PLEG): container finished" podID="0f9de743-47da-494a-aad4-fa892a1c0677" containerID="31af6c97d793a61d5038ad8951802770a382d7510d3ef7aa9dfd1e85270a94c3" exitCode=143 Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.833584 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.833604 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"0f9de743-47da-494a-aad4-fa892a1c0677","Type":"ContainerDied","Data":"faa91e64733ae46dc879bf9d1eb3e690ccbb52a5b65c17a0c3a7b1244b794705"} Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.833637 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"0f9de743-47da-494a-aad4-fa892a1c0677","Type":"ContainerDied","Data":"31af6c97d793a61d5038ad8951802770a382d7510d3ef7aa9dfd1e85270a94c3"} Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.833651 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"0f9de743-47da-494a-aad4-fa892a1c0677","Type":"ContainerDied","Data":"f586635dd81867f490e12b90b97d436a531a5aa8b54e91bbfb3080e317b537a1"} Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.833670 4728 scope.go:117] "RemoveContainer" containerID="faa91e64733ae46dc879bf9d1eb3e690ccbb52a5b65c17a0c3a7b1244b794705" Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.837632 4728 generic.go:334] "Generic (PLEG): container finished" podID="643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104" containerID="082a056062ee2def1f1f0b0df2a3c1635d65404a1b798300bf8fa52eca4643cd" exitCode=0 Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.837659 4728 generic.go:334] "Generic (PLEG): container finished" podID="643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104" containerID="62c155f1bca6b2511409e49f93e8dd3bbaa617dd604db2245aa174370aeb174e" exitCode=143 Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.837699 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104","Type":"ContainerDied","Data":"082a056062ee2def1f1f0b0df2a3c1635d65404a1b798300bf8fa52eca4643cd"} Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.837722 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104","Type":"ContainerDied","Data":"62c155f1bca6b2511409e49f93e8dd3bbaa617dd604db2245aa174370aeb174e"} Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.837737 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104","Type":"ContainerDied","Data":"9434a542e5c22bf5d580e423744419145620f153a2c089c6c17554aef5a15f53"} Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.837827 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.856658 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-76476c596-dh9zg" event={"ID":"b2657bde-2b78-49b2-bb33-a085f5a42024","Type":"ContainerStarted","Data":"38d755236e4efaf1855f3e775b70540d8fa5c965fa203ac9cdcacaa126da088d"} Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.856939 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-76476c596-dh9zg" Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.867011 4728 generic.go:334] "Generic (PLEG): container finished" podID="289975c4-8dcc-4318-8c23-5acee4caa8bd" containerID="6d2e6eb8ad80c42266a730bbedae37dc194334970df25a9af8f3ad9bc9bc3eb1" exitCode=0 Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.868088 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f844cf75-6pvrj" event={"ID":"289975c4-8dcc-4318-8c23-5acee4caa8bd","Type":"ContainerDied","Data":"6d2e6eb8ad80c42266a730bbedae37dc194334970df25a9af8f3ad9bc9bc3eb1"} Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.868116 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f844cf75-6pvrj" event={"ID":"289975c4-8dcc-4318-8c23-5acee4caa8bd","Type":"ContainerStarted","Data":"ac327e3aa8bf4264f480f4a13e6627a466c4bed26f630fcbd35543f97f9172d0"} Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.868129 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-55f844cf75-6pvrj" Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.929432 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-7755888bd8-shzsv" podStartSLOduration=26.171692394 podStartE2EDuration="29.929412386s" podCreationTimestamp="2025-12-05 11:29:30 +0000 UTC" firstStartedPulling="2025-12-05 11:29:55.230888385 +0000 UTC m=+1329.373011078" lastFinishedPulling="2025-12-05 11:29:58.988608377 +0000 UTC m=+1333.130731070" observedRunningTime="2025-12-05 11:29:59.847374263 +0000 UTC m=+1333.989496956" watchObservedRunningTime="2025-12-05 11:29:59.929412386 +0000 UTC m=+1334.071535079" Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.956741 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-55f844cf75-6pvrj" podStartSLOduration=6.956716659 podStartE2EDuration="6.956716659s" podCreationTimestamp="2025-12-05 11:29:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:29:59.917308305 +0000 UTC m=+1334.059431008" watchObservedRunningTime="2025-12-05 11:29:59.956716659 +0000 UTC m=+1334.098839362" Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.984132 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 11:29:59 crc kubenswrapper[4728]: I1205 11:29:59.990969 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.005912 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 11:30:00 crc kubenswrapper[4728]: E1205 11:30:00.010248 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f9de743-47da-494a-aad4-fa892a1c0677" containerName="glance-httpd" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 
11:30:00.013180 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f9de743-47da-494a-aad4-fa892a1c0677" containerName="glance-httpd" Dec 05 11:30:00 crc kubenswrapper[4728]: E1205 11:30:00.013275 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f9de743-47da-494a-aad4-fa892a1c0677" containerName="glance-log" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.013330 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f9de743-47da-494a-aad4-fa892a1c0677" containerName="glance-log" Dec 05 11:30:00 crc kubenswrapper[4728]: E1205 11:30:00.013397 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104" containerName="glance-log" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.013453 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104" containerName="glance-log" Dec 05 11:30:00 crc kubenswrapper[4728]: E1205 11:30:00.013514 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104" containerName="glance-httpd" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.013564 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104" containerName="glance-httpd" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.013880 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="0f9de743-47da-494a-aad4-fa892a1c0677" containerName="glance-httpd" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.013955 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104" containerName="glance-httpd" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.014010 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="0f9de743-47da-494a-aad4-fa892a1c0677" containerName="glance-log" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.014074 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104" containerName="glance-log" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.015009 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.009477 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-76476c596-dh9zg" podStartSLOduration=7.009461466 podStartE2EDuration="7.009461466s" podCreationTimestamp="2025-12-05 11:29:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:30:00.008142081 +0000 UTC m=+1334.150264764" watchObservedRunningTime="2025-12-05 11:30:00.009461466 +0000 UTC m=+1334.151584159" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.028066 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.028483 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.028614 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.028735 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.028859 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-4w4f7" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.037664 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.049480 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.060636 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.152079 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.154006 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.159992 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.160438 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.173054 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.192314 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415570-hv7p2"] Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.193009 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a20bde59-1d28-4f65-8dc4-26808e4abb01-scripts\") pod \"glance-default-external-api-0\" (UID: \"a20bde59-1d28-4f65-8dc4-26808e4abb01\") " pod="openstack/glance-default-external-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.193092 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zsfwr\" (UniqueName: \"kubernetes.io/projected/a20bde59-1d28-4f65-8dc4-26808e4abb01-kube-api-access-zsfwr\") pod \"glance-default-external-api-0\" (UID: \"a20bde59-1d28-4f65-8dc4-26808e4abb01\") " pod="openstack/glance-default-external-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.193122 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a20bde59-1d28-4f65-8dc4-26808e4abb01-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"a20bde59-1d28-4f65-8dc4-26808e4abb01\") " pod="openstack/glance-default-external-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.193174 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a20bde59-1d28-4f65-8dc4-26808e4abb01-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"a20bde59-1d28-4f65-8dc4-26808e4abb01\") " pod="openstack/glance-default-external-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.193218 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a20bde59-1d28-4f65-8dc4-26808e4abb01-config-data\") pod \"glance-default-external-api-0\" (UID: \"a20bde59-1d28-4f65-8dc4-26808e4abb01\") " pod="openstack/glance-default-external-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.193264 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a20bde59-1d28-4f65-8dc4-26808e4abb01-logs\") pod \"glance-default-external-api-0\" (UID: \"a20bde59-1d28-4f65-8dc4-26808e4abb01\") " pod="openstack/glance-default-external-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.193297 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: 
\"a20bde59-1d28-4f65-8dc4-26808e4abb01\") " pod="openstack/glance-default-external-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.193322 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a20bde59-1d28-4f65-8dc4-26808e4abb01-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a20bde59-1d28-4f65-8dc4-26808e4abb01\") " pod="openstack/glance-default-external-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.193362 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/a20bde59-1d28-4f65-8dc4-26808e4abb01-ceph\") pod \"glance-default-external-api-0\" (UID: \"a20bde59-1d28-4f65-8dc4-26808e4abb01\") " pod="openstack/glance-default-external-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.193469 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415570-hv7p2" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.202217 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415570-hv7p2"] Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.202840 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.203129 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.294959 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/a20bde59-1d28-4f65-8dc4-26808e4abb01-ceph\") pod \"glance-default-external-api-0\" (UID: \"a20bde59-1d28-4f65-8dc4-26808e4abb01\") " pod="openstack/glance-default-external-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.295006 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/c8ca2efe-4d75-46f8-9878-e4f76b8e85f4-ceph\") pod \"glance-default-internal-api-0\" (UID: \"c8ca2efe-4d75-46f8-9878-e4f76b8e85f4\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.295041 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9773570d-8d08-4620-8911-40e5ecd02aef-config-volume\") pod \"collect-profiles-29415570-hv7p2\" (UID: \"9773570d-8d08-4620-8911-40e5ecd02aef\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415570-hv7p2" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.295057 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c8ca2efe-4d75-46f8-9878-e4f76b8e85f4-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"c8ca2efe-4d75-46f8-9878-e4f76b8e85f4\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.295079 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: 
\"kubernetes.io/secret/9773570d-8d08-4620-8911-40e5ecd02aef-secret-volume\") pod \"collect-profiles-29415570-hv7p2\" (UID: \"9773570d-8d08-4620-8911-40e5ecd02aef\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415570-hv7p2" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.295103 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a20bde59-1d28-4f65-8dc4-26808e4abb01-scripts\") pod \"glance-default-external-api-0\" (UID: \"a20bde59-1d28-4f65-8dc4-26808e4abb01\") " pod="openstack/glance-default-external-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.295125 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vlbnp\" (UniqueName: \"kubernetes.io/projected/9773570d-8d08-4620-8911-40e5ecd02aef-kube-api-access-vlbnp\") pod \"collect-profiles-29415570-hv7p2\" (UID: \"9773570d-8d08-4620-8911-40e5ecd02aef\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415570-hv7p2" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.295140 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8ca2efe-4d75-46f8-9878-e4f76b8e85f4-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"c8ca2efe-4d75-46f8-9878-e4f76b8e85f4\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.295170 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c8ca2efe-4d75-46f8-9878-e4f76b8e85f4-scripts\") pod \"glance-default-internal-api-0\" (UID: \"c8ca2efe-4d75-46f8-9878-e4f76b8e85f4\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.295185 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c8ca2efe-4d75-46f8-9878-e4f76b8e85f4-logs\") pod \"glance-default-internal-api-0\" (UID: \"c8ca2efe-4d75-46f8-9878-e4f76b8e85f4\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.295209 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zsfwr\" (UniqueName: \"kubernetes.io/projected/a20bde59-1d28-4f65-8dc4-26808e4abb01-kube-api-access-zsfwr\") pod \"glance-default-external-api-0\" (UID: \"a20bde59-1d28-4f65-8dc4-26808e4abb01\") " pod="openstack/glance-default-external-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.295228 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8ca2efe-4d75-46f8-9878-e4f76b8e85f4-config-data\") pod \"glance-default-internal-api-0\" (UID: \"c8ca2efe-4d75-46f8-9878-e4f76b8e85f4\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.295245 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a20bde59-1d28-4f65-8dc4-26808e4abb01-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"a20bde59-1d28-4f65-8dc4-26808e4abb01\") " pod="openstack/glance-default-external-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.295261 4728 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vxw46\" (UniqueName: \"kubernetes.io/projected/c8ca2efe-4d75-46f8-9878-e4f76b8e85f4-kube-api-access-vxw46\") pod \"glance-default-internal-api-0\" (UID: \"c8ca2efe-4d75-46f8-9878-e4f76b8e85f4\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.295290 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c8ca2efe-4d75-46f8-9878-e4f76b8e85f4-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"c8ca2efe-4d75-46f8-9878-e4f76b8e85f4\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.295330 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a20bde59-1d28-4f65-8dc4-26808e4abb01-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"a20bde59-1d28-4f65-8dc4-26808e4abb01\") " pod="openstack/glance-default-external-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.295361 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a20bde59-1d28-4f65-8dc4-26808e4abb01-config-data\") pod \"glance-default-external-api-0\" (UID: \"a20bde59-1d28-4f65-8dc4-26808e4abb01\") " pod="openstack/glance-default-external-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.295376 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a20bde59-1d28-4f65-8dc4-26808e4abb01-logs\") pod \"glance-default-external-api-0\" (UID: \"a20bde59-1d28-4f65-8dc4-26808e4abb01\") " pod="openstack/glance-default-external-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.295397 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"c8ca2efe-4d75-46f8-9878-e4f76b8e85f4\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.295417 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"a20bde59-1d28-4f65-8dc4-26808e4abb01\") " pod="openstack/glance-default-external-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.295432 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a20bde59-1d28-4f65-8dc4-26808e4abb01-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a20bde59-1d28-4f65-8dc4-26808e4abb01\") " pod="openstack/glance-default-external-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.295829 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a20bde59-1d28-4f65-8dc4-26808e4abb01-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a20bde59-1d28-4f65-8dc4-26808e4abb01\") " pod="openstack/glance-default-external-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.298119 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" 
(UniqueName: \"kubernetes.io/empty-dir/a20bde59-1d28-4f65-8dc4-26808e4abb01-logs\") pod \"glance-default-external-api-0\" (UID: \"a20bde59-1d28-4f65-8dc4-26808e4abb01\") " pod="openstack/glance-default-external-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.298341 4728 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"a20bde59-1d28-4f65-8dc4-26808e4abb01\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-external-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.303753 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a20bde59-1d28-4f65-8dc4-26808e4abb01-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"a20bde59-1d28-4f65-8dc4-26808e4abb01\") " pod="openstack/glance-default-external-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.305368 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a20bde59-1d28-4f65-8dc4-26808e4abb01-scripts\") pod \"glance-default-external-api-0\" (UID: \"a20bde59-1d28-4f65-8dc4-26808e4abb01\") " pod="openstack/glance-default-external-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.305983 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a20bde59-1d28-4f65-8dc4-26808e4abb01-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"a20bde59-1d28-4f65-8dc4-26808e4abb01\") " pod="openstack/glance-default-external-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.310237 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/a20bde59-1d28-4f65-8dc4-26808e4abb01-ceph\") pod \"glance-default-external-api-0\" (UID: \"a20bde59-1d28-4f65-8dc4-26808e4abb01\") " pod="openstack/glance-default-external-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.311889 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a20bde59-1d28-4f65-8dc4-26808e4abb01-config-data\") pod \"glance-default-external-api-0\" (UID: \"a20bde59-1d28-4f65-8dc4-26808e4abb01\") " pod="openstack/glance-default-external-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.318773 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zsfwr\" (UniqueName: \"kubernetes.io/projected/a20bde59-1d28-4f65-8dc4-26808e4abb01-kube-api-access-zsfwr\") pod \"glance-default-external-api-0\" (UID: \"a20bde59-1d28-4f65-8dc4-26808e4abb01\") " pod="openstack/glance-default-external-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.339011 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"a20bde59-1d28-4f65-8dc4-26808e4abb01\") " pod="openstack/glance-default-external-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.350872 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.367152 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0f9de743-47da-494a-aad4-fa892a1c0677" path="/var/lib/kubelet/pods/0f9de743-47da-494a-aad4-fa892a1c0677/volumes" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.368007 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104" path="/var/lib/kubelet/pods/643d5db5-49c7-4e3d-8d9b-5d8dc8b9d104/volumes" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.399676 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/c8ca2efe-4d75-46f8-9878-e4f76b8e85f4-ceph\") pod \"glance-default-internal-api-0\" (UID: \"c8ca2efe-4d75-46f8-9878-e4f76b8e85f4\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.399730 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9773570d-8d08-4620-8911-40e5ecd02aef-config-volume\") pod \"collect-profiles-29415570-hv7p2\" (UID: \"9773570d-8d08-4620-8911-40e5ecd02aef\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415570-hv7p2" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.399751 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c8ca2efe-4d75-46f8-9878-e4f76b8e85f4-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"c8ca2efe-4d75-46f8-9878-e4f76b8e85f4\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.399774 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9773570d-8d08-4620-8911-40e5ecd02aef-secret-volume\") pod \"collect-profiles-29415570-hv7p2\" (UID: \"9773570d-8d08-4620-8911-40e5ecd02aef\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415570-hv7p2" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.399838 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vlbnp\" (UniqueName: \"kubernetes.io/projected/9773570d-8d08-4620-8911-40e5ecd02aef-kube-api-access-vlbnp\") pod \"collect-profiles-29415570-hv7p2\" (UID: \"9773570d-8d08-4620-8911-40e5ecd02aef\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415570-hv7p2" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.399855 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8ca2efe-4d75-46f8-9878-e4f76b8e85f4-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"c8ca2efe-4d75-46f8-9878-e4f76b8e85f4\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.399884 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c8ca2efe-4d75-46f8-9878-e4f76b8e85f4-scripts\") pod \"glance-default-internal-api-0\" (UID: \"c8ca2efe-4d75-46f8-9878-e4f76b8e85f4\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.399900 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/c8ca2efe-4d75-46f8-9878-e4f76b8e85f4-logs\") pod \"glance-default-internal-api-0\" (UID: \"c8ca2efe-4d75-46f8-9878-e4f76b8e85f4\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.399926 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8ca2efe-4d75-46f8-9878-e4f76b8e85f4-config-data\") pod \"glance-default-internal-api-0\" (UID: \"c8ca2efe-4d75-46f8-9878-e4f76b8e85f4\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.399945 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vxw46\" (UniqueName: \"kubernetes.io/projected/c8ca2efe-4d75-46f8-9878-e4f76b8e85f4-kube-api-access-vxw46\") pod \"glance-default-internal-api-0\" (UID: \"c8ca2efe-4d75-46f8-9878-e4f76b8e85f4\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.399975 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c8ca2efe-4d75-46f8-9878-e4f76b8e85f4-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"c8ca2efe-4d75-46f8-9878-e4f76b8e85f4\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.400019 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"c8ca2efe-4d75-46f8-9878-e4f76b8e85f4\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.400237 4728 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"c8ca2efe-4d75-46f8-9878-e4f76b8e85f4\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/glance-default-internal-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.408350 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9773570d-8d08-4620-8911-40e5ecd02aef-config-volume\") pod \"collect-profiles-29415570-hv7p2\" (UID: \"9773570d-8d08-4620-8911-40e5ecd02aef\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415570-hv7p2" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.408670 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c8ca2efe-4d75-46f8-9878-e4f76b8e85f4-logs\") pod \"glance-default-internal-api-0\" (UID: \"c8ca2efe-4d75-46f8-9878-e4f76b8e85f4\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.408882 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c8ca2efe-4d75-46f8-9878-e4f76b8e85f4-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"c8ca2efe-4d75-46f8-9878-e4f76b8e85f4\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.412448 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8ca2efe-4d75-46f8-9878-e4f76b8e85f4-combined-ca-bundle\") pod 
\"glance-default-internal-api-0\" (UID: \"c8ca2efe-4d75-46f8-9878-e4f76b8e85f4\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.416484 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8ca2efe-4d75-46f8-9878-e4f76b8e85f4-config-data\") pod \"glance-default-internal-api-0\" (UID: \"c8ca2efe-4d75-46f8-9878-e4f76b8e85f4\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.426102 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c8ca2efe-4d75-46f8-9878-e4f76b8e85f4-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"c8ca2efe-4d75-46f8-9878-e4f76b8e85f4\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.426388 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/c8ca2efe-4d75-46f8-9878-e4f76b8e85f4-ceph\") pod \"glance-default-internal-api-0\" (UID: \"c8ca2efe-4d75-46f8-9878-e4f76b8e85f4\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.427071 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c8ca2efe-4d75-46f8-9878-e4f76b8e85f4-scripts\") pod \"glance-default-internal-api-0\" (UID: \"c8ca2efe-4d75-46f8-9878-e4f76b8e85f4\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.427549 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vlbnp\" (UniqueName: \"kubernetes.io/projected/9773570d-8d08-4620-8911-40e5ecd02aef-kube-api-access-vlbnp\") pod \"collect-profiles-29415570-hv7p2\" (UID: \"9773570d-8d08-4620-8911-40e5ecd02aef\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415570-hv7p2" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.427781 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9773570d-8d08-4620-8911-40e5ecd02aef-secret-volume\") pod \"collect-profiles-29415570-hv7p2\" (UID: \"9773570d-8d08-4620-8911-40e5ecd02aef\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415570-hv7p2" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.433099 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"c8ca2efe-4d75-46f8-9878-e4f76b8e85f4\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.436943 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vxw46\" (UniqueName: \"kubernetes.io/projected/c8ca2efe-4d75-46f8-9878-e4f76b8e85f4-kube-api-access-vxw46\") pod \"glance-default-internal-api-0\" (UID: \"c8ca2efe-4d75-46f8-9878-e4f76b8e85f4\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.499337 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.520279 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415570-hv7p2" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.659496 4728 scope.go:117] "RemoveContainer" containerID="31af6c97d793a61d5038ad8951802770a382d7510d3ef7aa9dfd1e85270a94c3" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.785879 4728 scope.go:117] "RemoveContainer" containerID="faa91e64733ae46dc879bf9d1eb3e690ccbb52a5b65c17a0c3a7b1244b794705" Dec 05 11:30:00 crc kubenswrapper[4728]: E1205 11:30:00.786645 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"faa91e64733ae46dc879bf9d1eb3e690ccbb52a5b65c17a0c3a7b1244b794705\": container with ID starting with faa91e64733ae46dc879bf9d1eb3e690ccbb52a5b65c17a0c3a7b1244b794705 not found: ID does not exist" containerID="faa91e64733ae46dc879bf9d1eb3e690ccbb52a5b65c17a0c3a7b1244b794705" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.786670 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"faa91e64733ae46dc879bf9d1eb3e690ccbb52a5b65c17a0c3a7b1244b794705"} err="failed to get container status \"faa91e64733ae46dc879bf9d1eb3e690ccbb52a5b65c17a0c3a7b1244b794705\": rpc error: code = NotFound desc = could not find container \"faa91e64733ae46dc879bf9d1eb3e690ccbb52a5b65c17a0c3a7b1244b794705\": container with ID starting with faa91e64733ae46dc879bf9d1eb3e690ccbb52a5b65c17a0c3a7b1244b794705 not found: ID does not exist" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.786689 4728 scope.go:117] "RemoveContainer" containerID="31af6c97d793a61d5038ad8951802770a382d7510d3ef7aa9dfd1e85270a94c3" Dec 05 11:30:00 crc kubenswrapper[4728]: E1205 11:30:00.786921 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"31af6c97d793a61d5038ad8951802770a382d7510d3ef7aa9dfd1e85270a94c3\": container with ID starting with 31af6c97d793a61d5038ad8951802770a382d7510d3ef7aa9dfd1e85270a94c3 not found: ID does not exist" containerID="31af6c97d793a61d5038ad8951802770a382d7510d3ef7aa9dfd1e85270a94c3" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.786942 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31af6c97d793a61d5038ad8951802770a382d7510d3ef7aa9dfd1e85270a94c3"} err="failed to get container status \"31af6c97d793a61d5038ad8951802770a382d7510d3ef7aa9dfd1e85270a94c3\": rpc error: code = NotFound desc = could not find container \"31af6c97d793a61d5038ad8951802770a382d7510d3ef7aa9dfd1e85270a94c3\": container with ID starting with 31af6c97d793a61d5038ad8951802770a382d7510d3ef7aa9dfd1e85270a94c3 not found: ID does not exist" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.786957 4728 scope.go:117] "RemoveContainer" containerID="faa91e64733ae46dc879bf9d1eb3e690ccbb52a5b65c17a0c3a7b1244b794705" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.802905 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"faa91e64733ae46dc879bf9d1eb3e690ccbb52a5b65c17a0c3a7b1244b794705"} err="failed to get container status \"faa91e64733ae46dc879bf9d1eb3e690ccbb52a5b65c17a0c3a7b1244b794705\": rpc error: code = NotFound desc = could not find container \"faa91e64733ae46dc879bf9d1eb3e690ccbb52a5b65c17a0c3a7b1244b794705\": container with ID starting with faa91e64733ae46dc879bf9d1eb3e690ccbb52a5b65c17a0c3a7b1244b794705 not found: ID does not exist" Dec 05 11:30:00 crc 
kubenswrapper[4728]: I1205 11:30:00.802949 4728 scope.go:117] "RemoveContainer" containerID="31af6c97d793a61d5038ad8951802770a382d7510d3ef7aa9dfd1e85270a94c3" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.813946 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31af6c97d793a61d5038ad8951802770a382d7510d3ef7aa9dfd1e85270a94c3"} err="failed to get container status \"31af6c97d793a61d5038ad8951802770a382d7510d3ef7aa9dfd1e85270a94c3\": rpc error: code = NotFound desc = could not find container \"31af6c97d793a61d5038ad8951802770a382d7510d3ef7aa9dfd1e85270a94c3\": container with ID starting with 31af6c97d793a61d5038ad8951802770a382d7510d3ef7aa9dfd1e85270a94c3 not found: ID does not exist" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.814120 4728 scope.go:117] "RemoveContainer" containerID="082a056062ee2def1f1f0b0df2a3c1635d65404a1b798300bf8fa52eca4643cd" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.928562 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5d7bdb6c68-cfbgd" event={"ID":"7c54bdd7-0427-403e-a33d-2d52ec56a7fc","Type":"ContainerStarted","Data":"bf564e7b42883be7caec87b4b0532d5f01da418814c2ceb4858093c4bba6b4a8"} Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.934097 4728 generic.go:334] "Generic (PLEG): container finished" podID="198c5a44-d3b2-4afd-b034-d898309e0f42" containerID="1ca97bc11021d729f13692aab870d70e82f18e3fa12ecc24a41950063813b560" exitCode=0 Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.934668 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-sg5mr" event={"ID":"198c5a44-d3b2-4afd-b034-d898309e0f42","Type":"ContainerDied","Data":"1ca97bc11021d729f13692aab870d70e82f18e3fa12ecc24a41950063813b560"} Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.957844 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-5d7bdb6c68-cfbgd" podStartSLOduration=27.245807924 podStartE2EDuration="30.957829176s" podCreationTimestamp="2025-12-05 11:29:30 +0000 UTC" firstStartedPulling="2025-12-05 11:29:55.051121623 +0000 UTC m=+1329.193244316" lastFinishedPulling="2025-12-05 11:29:58.763142865 +0000 UTC m=+1332.905265568" observedRunningTime="2025-12-05 11:30:00.955915375 +0000 UTC m=+1335.098038078" watchObservedRunningTime="2025-12-05 11:30:00.957829176 +0000 UTC m=+1335.099951879" Dec 05 11:30:00 crc kubenswrapper[4728]: I1205 11:30:00.986683 4728 scope.go:117] "RemoveContainer" containerID="62c155f1bca6b2511409e49f93e8dd3bbaa617dd604db2245aa174370aeb174e" Dec 05 11:30:01 crc kubenswrapper[4728]: I1205 11:30:01.012579 4728 scope.go:117] "RemoveContainer" containerID="082a056062ee2def1f1f0b0df2a3c1635d65404a1b798300bf8fa52eca4643cd" Dec 05 11:30:01 crc kubenswrapper[4728]: E1205 11:30:01.017083 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"082a056062ee2def1f1f0b0df2a3c1635d65404a1b798300bf8fa52eca4643cd\": container with ID starting with 082a056062ee2def1f1f0b0df2a3c1635d65404a1b798300bf8fa52eca4643cd not found: ID does not exist" containerID="082a056062ee2def1f1f0b0df2a3c1635d65404a1b798300bf8fa52eca4643cd" Dec 05 11:30:01 crc kubenswrapper[4728]: I1205 11:30:01.017130 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"082a056062ee2def1f1f0b0df2a3c1635d65404a1b798300bf8fa52eca4643cd"} err="failed to get container status 
\"082a056062ee2def1f1f0b0df2a3c1635d65404a1b798300bf8fa52eca4643cd\": rpc error: code = NotFound desc = could not find container \"082a056062ee2def1f1f0b0df2a3c1635d65404a1b798300bf8fa52eca4643cd\": container with ID starting with 082a056062ee2def1f1f0b0df2a3c1635d65404a1b798300bf8fa52eca4643cd not found: ID does not exist" Dec 05 11:30:01 crc kubenswrapper[4728]: I1205 11:30:01.017157 4728 scope.go:117] "RemoveContainer" containerID="62c155f1bca6b2511409e49f93e8dd3bbaa617dd604db2245aa174370aeb174e" Dec 05 11:30:01 crc kubenswrapper[4728]: E1205 11:30:01.020388 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"62c155f1bca6b2511409e49f93e8dd3bbaa617dd604db2245aa174370aeb174e\": container with ID starting with 62c155f1bca6b2511409e49f93e8dd3bbaa617dd604db2245aa174370aeb174e not found: ID does not exist" containerID="62c155f1bca6b2511409e49f93e8dd3bbaa617dd604db2245aa174370aeb174e" Dec 05 11:30:01 crc kubenswrapper[4728]: I1205 11:30:01.020432 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62c155f1bca6b2511409e49f93e8dd3bbaa617dd604db2245aa174370aeb174e"} err="failed to get container status \"62c155f1bca6b2511409e49f93e8dd3bbaa617dd604db2245aa174370aeb174e\": rpc error: code = NotFound desc = could not find container \"62c155f1bca6b2511409e49f93e8dd3bbaa617dd604db2245aa174370aeb174e\": container with ID starting with 62c155f1bca6b2511409e49f93e8dd3bbaa617dd604db2245aa174370aeb174e not found: ID does not exist" Dec 05 11:30:01 crc kubenswrapper[4728]: I1205 11:30:01.020456 4728 scope.go:117] "RemoveContainer" containerID="082a056062ee2def1f1f0b0df2a3c1635d65404a1b798300bf8fa52eca4643cd" Dec 05 11:30:01 crc kubenswrapper[4728]: I1205 11:30:01.023992 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"082a056062ee2def1f1f0b0df2a3c1635d65404a1b798300bf8fa52eca4643cd"} err="failed to get container status \"082a056062ee2def1f1f0b0df2a3c1635d65404a1b798300bf8fa52eca4643cd\": rpc error: code = NotFound desc = could not find container \"082a056062ee2def1f1f0b0df2a3c1635d65404a1b798300bf8fa52eca4643cd\": container with ID starting with 082a056062ee2def1f1f0b0df2a3c1635d65404a1b798300bf8fa52eca4643cd not found: ID does not exist" Dec 05 11:30:01 crc kubenswrapper[4728]: I1205 11:30:01.024015 4728 scope.go:117] "RemoveContainer" containerID="62c155f1bca6b2511409e49f93e8dd3bbaa617dd604db2245aa174370aeb174e" Dec 05 11:30:01 crc kubenswrapper[4728]: I1205 11:30:01.026033 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62c155f1bca6b2511409e49f93e8dd3bbaa617dd604db2245aa174370aeb174e"} err="failed to get container status \"62c155f1bca6b2511409e49f93e8dd3bbaa617dd604db2245aa174370aeb174e\": rpc error: code = NotFound desc = could not find container \"62c155f1bca6b2511409e49f93e8dd3bbaa617dd604db2245aa174370aeb174e\": container with ID starting with 62c155f1bca6b2511409e49f93e8dd3bbaa617dd604db2245aa174370aeb174e not found: ID does not exist" Dec 05 11:30:01 crc kubenswrapper[4728]: I1205 11:30:01.199892 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-7755888bd8-shzsv" Dec 05 11:30:01 crc kubenswrapper[4728]: I1205 11:30:01.199944 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-7755888bd8-shzsv" Dec 05 11:30:01 crc kubenswrapper[4728]: I1205 11:30:01.324581 4728 kubelet.go:2428] "SyncLoop 
UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415570-hv7p2"] Dec 05 11:30:01 crc kubenswrapper[4728]: I1205 11:30:01.400435 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 11:30:01 crc kubenswrapper[4728]: W1205 11:30:01.412547 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda20bde59_1d28_4f65_8dc4_26808e4abb01.slice/crio-7b8b61a6d8415eac85f9109329faf2d430993d276ffe8cf4c875dd6c7a250d3b WatchSource:0}: Error finding container 7b8b61a6d8415eac85f9109329faf2d430993d276ffe8cf4c875dd6c7a250d3b: Status 404 returned error can't find the container with id 7b8b61a6d8415eac85f9109329faf2d430993d276ffe8cf4c875dd6c7a250d3b Dec 05 11:30:01 crc kubenswrapper[4728]: I1205 11:30:01.944694 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dbac4a04-13e4-43e5-a221-3faec9e9fac7","Type":"ContainerStarted","Data":"900224b2993b650f331db1f1a449c72f71c4d12c706517d76a9bd5bdbc624302"} Dec 05 11:30:01 crc kubenswrapper[4728]: I1205 11:30:01.946991 4728 generic.go:334] "Generic (PLEG): container finished" podID="9773570d-8d08-4620-8911-40e5ecd02aef" containerID="51a3d337d1fbf45a1a83f48dc99cbbc7fc7f463c92483cc0f5fae249c0eb0872" exitCode=0 Dec 05 11:30:01 crc kubenswrapper[4728]: I1205 11:30:01.947052 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415570-hv7p2" event={"ID":"9773570d-8d08-4620-8911-40e5ecd02aef","Type":"ContainerDied","Data":"51a3d337d1fbf45a1a83f48dc99cbbc7fc7f463c92483cc0f5fae249c0eb0872"} Dec 05 11:30:01 crc kubenswrapper[4728]: I1205 11:30:01.947074 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415570-hv7p2" event={"ID":"9773570d-8d08-4620-8911-40e5ecd02aef","Type":"ContainerStarted","Data":"346f9a36328354364325b889c5508c5976affb6a89fcde2ac46c68ad216d417c"} Dec 05 11:30:01 crc kubenswrapper[4728]: I1205 11:30:01.948769 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a20bde59-1d28-4f65-8dc4-26808e4abb01","Type":"ContainerStarted","Data":"7b8b61a6d8415eac85f9109329faf2d430993d276ffe8cf4c875dd6c7a250d3b"} Dec 05 11:30:02 crc kubenswrapper[4728]: I1205 11:30:02.342945 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 11:30:02 crc kubenswrapper[4728]: I1205 11:30:02.405430 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-sg5mr" Dec 05 11:30:02 crc kubenswrapper[4728]: I1205 11:30:02.539366 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/198c5a44-d3b2-4afd-b034-d898309e0f42-scripts\") pod \"198c5a44-d3b2-4afd-b034-d898309e0f42\" (UID: \"198c5a44-d3b2-4afd-b034-d898309e0f42\") " Dec 05 11:30:02 crc kubenswrapper[4728]: I1205 11:30:02.539476 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/198c5a44-d3b2-4afd-b034-d898309e0f42-config-data\") pod \"198c5a44-d3b2-4afd-b034-d898309e0f42\" (UID: \"198c5a44-d3b2-4afd-b034-d898309e0f42\") " Dec 05 11:30:02 crc kubenswrapper[4728]: I1205 11:30:02.539542 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/198c5a44-d3b2-4afd-b034-d898309e0f42-combined-ca-bundle\") pod \"198c5a44-d3b2-4afd-b034-d898309e0f42\" (UID: \"198c5a44-d3b2-4afd-b034-d898309e0f42\") " Dec 05 11:30:02 crc kubenswrapper[4728]: I1205 11:30:02.539619 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w59rs\" (UniqueName: \"kubernetes.io/projected/198c5a44-d3b2-4afd-b034-d898309e0f42-kube-api-access-w59rs\") pod \"198c5a44-d3b2-4afd-b034-d898309e0f42\" (UID: \"198c5a44-d3b2-4afd-b034-d898309e0f42\") " Dec 05 11:30:02 crc kubenswrapper[4728]: I1205 11:30:02.539676 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/198c5a44-d3b2-4afd-b034-d898309e0f42-logs\") pod \"198c5a44-d3b2-4afd-b034-d898309e0f42\" (UID: \"198c5a44-d3b2-4afd-b034-d898309e0f42\") " Dec 05 11:30:02 crc kubenswrapper[4728]: I1205 11:30:02.540380 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/198c5a44-d3b2-4afd-b034-d898309e0f42-logs" (OuterVolumeSpecName: "logs") pod "198c5a44-d3b2-4afd-b034-d898309e0f42" (UID: "198c5a44-d3b2-4afd-b034-d898309e0f42"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:30:02 crc kubenswrapper[4728]: I1205 11:30:02.546469 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/198c5a44-d3b2-4afd-b034-d898309e0f42-scripts" (OuterVolumeSpecName: "scripts") pod "198c5a44-d3b2-4afd-b034-d898309e0f42" (UID: "198c5a44-d3b2-4afd-b034-d898309e0f42"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:30:02 crc kubenswrapper[4728]: I1205 11:30:02.546913 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/198c5a44-d3b2-4afd-b034-d898309e0f42-kube-api-access-w59rs" (OuterVolumeSpecName: "kube-api-access-w59rs") pod "198c5a44-d3b2-4afd-b034-d898309e0f42" (UID: "198c5a44-d3b2-4afd-b034-d898309e0f42"). InnerVolumeSpecName "kube-api-access-w59rs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:30:02 crc kubenswrapper[4728]: I1205 11:30:02.573980 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/198c5a44-d3b2-4afd-b034-d898309e0f42-config-data" (OuterVolumeSpecName: "config-data") pod "198c5a44-d3b2-4afd-b034-d898309e0f42" (UID: "198c5a44-d3b2-4afd-b034-d898309e0f42"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:30:02 crc kubenswrapper[4728]: I1205 11:30:02.582584 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/198c5a44-d3b2-4afd-b034-d898309e0f42-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "198c5a44-d3b2-4afd-b034-d898309e0f42" (UID: "198c5a44-d3b2-4afd-b034-d898309e0f42"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:30:02 crc kubenswrapper[4728]: I1205 11:30:02.641328 4728 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/198c5a44-d3b2-4afd-b034-d898309e0f42-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:02 crc kubenswrapper[4728]: I1205 11:30:02.641592 4728 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/198c5a44-d3b2-4afd-b034-d898309e0f42-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:02 crc kubenswrapper[4728]: I1205 11:30:02.641601 4728 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/198c5a44-d3b2-4afd-b034-d898309e0f42-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:02 crc kubenswrapper[4728]: I1205 11:30:02.641609 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w59rs\" (UniqueName: \"kubernetes.io/projected/198c5a44-d3b2-4afd-b034-d898309e0f42-kube-api-access-w59rs\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:02 crc kubenswrapper[4728]: I1205 11:30:02.641618 4728 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/198c5a44-d3b2-4afd-b034-d898309e0f42-logs\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:02 crc kubenswrapper[4728]: I1205 11:30:02.965626 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c8ca2efe-4d75-46f8-9878-e4f76b8e85f4","Type":"ContainerStarted","Data":"af91f23b4932b1dc7afe90ba566dde86274ba538be2249b9ada444826eb680dd"} Dec 05 11:30:02 crc kubenswrapper[4728]: I1205 11:30:02.968134 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a20bde59-1d28-4f65-8dc4-26808e4abb01","Type":"ContainerStarted","Data":"7a16f56434384282593b37212dee2501ad8a8b225f9aa57b98c86c81934e68e2"} Dec 05 11:30:02 crc kubenswrapper[4728]: I1205 11:30:02.968159 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a20bde59-1d28-4f65-8dc4-26808e4abb01","Type":"ContainerStarted","Data":"0f5a311865e19dd3d5b1a5165e08426e494a1b78c0c70701da5f3874882d590f"} Dec 05 11:30:02 crc kubenswrapper[4728]: I1205 11:30:02.974535 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-sg5mr" event={"ID":"198c5a44-d3b2-4afd-b034-d898309e0f42","Type":"ContainerDied","Data":"0e883e48d12b65b602e62c339ebf292ddeaa02aa6bd722d4c046fd94731cacd3"} Dec 05 11:30:02 crc kubenswrapper[4728]: I1205 11:30:02.974605 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-sg5mr" Dec 05 11:30:02 crc kubenswrapper[4728]: I1205 11:30:02.974720 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0e883e48d12b65b602e62c339ebf292ddeaa02aa6bd722d4c046fd94731cacd3" Dec 05 11:30:02 crc kubenswrapper[4728]: I1205 11:30:02.976941 4728 generic.go:334] "Generic (PLEG): container finished" podID="1a984c5b-a1ab-47ab-9acd-998c05072ea1" containerID="b9647a262dee9f754f4071a8e491ac32fb1b5b744bbd889caa0be4f71a8b42c9" exitCode=0 Dec 05 11:30:02 crc kubenswrapper[4728]: I1205 11:30:02.976999 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-27gkv" event={"ID":"1a984c5b-a1ab-47ab-9acd-998c05072ea1","Type":"ContainerDied","Data":"b9647a262dee9f754f4071a8e491ac32fb1b5b744bbd889caa0be4f71a8b42c9"} Dec 05 11:30:03 crc kubenswrapper[4728]: I1205 11:30:03.022283 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.022268487 podStartE2EDuration="4.022268487s" podCreationTimestamp="2025-12-05 11:29:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:30:03.019382461 +0000 UTC m=+1337.161505164" watchObservedRunningTime="2025-12-05 11:30:03.022268487 +0000 UTC m=+1337.164391180" Dec 05 11:30:03 crc kubenswrapper[4728]: I1205 11:30:03.091980 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-554ccc7b5b-l2c6v"] Dec 05 11:30:03 crc kubenswrapper[4728]: E1205 11:30:03.092443 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="198c5a44-d3b2-4afd-b034-d898309e0f42" containerName="placement-db-sync" Dec 05 11:30:03 crc kubenswrapper[4728]: I1205 11:30:03.092458 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="198c5a44-d3b2-4afd-b034-d898309e0f42" containerName="placement-db-sync" Dec 05 11:30:03 crc kubenswrapper[4728]: I1205 11:30:03.092706 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="198c5a44-d3b2-4afd-b034-d898309e0f42" containerName="placement-db-sync" Dec 05 11:30:03 crc kubenswrapper[4728]: I1205 11:30:03.093836 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-554ccc7b5b-l2c6v" Dec 05 11:30:03 crc kubenswrapper[4728]: I1205 11:30:03.099190 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-v7fpr" Dec 05 11:30:03 crc kubenswrapper[4728]: I1205 11:30:03.099556 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Dec 05 11:30:03 crc kubenswrapper[4728]: I1205 11:30:03.099677 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Dec 05 11:30:03 crc kubenswrapper[4728]: I1205 11:30:03.100149 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 05 11:30:03 crc kubenswrapper[4728]: I1205 11:30:03.100231 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 05 11:30:03 crc kubenswrapper[4728]: I1205 11:30:03.141868 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-554ccc7b5b-l2c6v"] Dec 05 11:30:03 crc kubenswrapper[4728]: I1205 11:30:03.256542 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d7d32022-fd6b-4ecd-83d4-5b628f19e413-internal-tls-certs\") pod \"placement-554ccc7b5b-l2c6v\" (UID: \"d7d32022-fd6b-4ecd-83d4-5b628f19e413\") " pod="openstack/placement-554ccc7b5b-l2c6v" Dec 05 11:30:03 crc kubenswrapper[4728]: I1205 11:30:03.256619 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7d32022-fd6b-4ecd-83d4-5b628f19e413-combined-ca-bundle\") pod \"placement-554ccc7b5b-l2c6v\" (UID: \"d7d32022-fd6b-4ecd-83d4-5b628f19e413\") " pod="openstack/placement-554ccc7b5b-l2c6v" Dec 05 11:30:03 crc kubenswrapper[4728]: I1205 11:30:03.256646 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d7d32022-fd6b-4ecd-83d4-5b628f19e413-public-tls-certs\") pod \"placement-554ccc7b5b-l2c6v\" (UID: \"d7d32022-fd6b-4ecd-83d4-5b628f19e413\") " pod="openstack/placement-554ccc7b5b-l2c6v" Dec 05 11:30:03 crc kubenswrapper[4728]: I1205 11:30:03.256717 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7d32022-fd6b-4ecd-83d4-5b628f19e413-config-data\") pod \"placement-554ccc7b5b-l2c6v\" (UID: \"d7d32022-fd6b-4ecd-83d4-5b628f19e413\") " pod="openstack/placement-554ccc7b5b-l2c6v" Dec 05 11:30:03 crc kubenswrapper[4728]: I1205 11:30:03.256745 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d7d32022-fd6b-4ecd-83d4-5b628f19e413-scripts\") pod \"placement-554ccc7b5b-l2c6v\" (UID: \"d7d32022-fd6b-4ecd-83d4-5b628f19e413\") " pod="openstack/placement-554ccc7b5b-l2c6v" Dec 05 11:30:03 crc kubenswrapper[4728]: I1205 11:30:03.256854 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d7d32022-fd6b-4ecd-83d4-5b628f19e413-logs\") pod \"placement-554ccc7b5b-l2c6v\" (UID: \"d7d32022-fd6b-4ecd-83d4-5b628f19e413\") " pod="openstack/placement-554ccc7b5b-l2c6v" Dec 05 11:30:03 crc kubenswrapper[4728]: I1205 11:30:03.256898 4728 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z7v8q\" (UniqueName: \"kubernetes.io/projected/d7d32022-fd6b-4ecd-83d4-5b628f19e413-kube-api-access-z7v8q\") pod \"placement-554ccc7b5b-l2c6v\" (UID: \"d7d32022-fd6b-4ecd-83d4-5b628f19e413\") " pod="openstack/placement-554ccc7b5b-l2c6v" Dec 05 11:30:03 crc kubenswrapper[4728]: I1205 11:30:03.358223 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d7d32022-fd6b-4ecd-83d4-5b628f19e413-scripts\") pod \"placement-554ccc7b5b-l2c6v\" (UID: \"d7d32022-fd6b-4ecd-83d4-5b628f19e413\") " pod="openstack/placement-554ccc7b5b-l2c6v" Dec 05 11:30:03 crc kubenswrapper[4728]: I1205 11:30:03.358550 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d7d32022-fd6b-4ecd-83d4-5b628f19e413-logs\") pod \"placement-554ccc7b5b-l2c6v\" (UID: \"d7d32022-fd6b-4ecd-83d4-5b628f19e413\") " pod="openstack/placement-554ccc7b5b-l2c6v" Dec 05 11:30:03 crc kubenswrapper[4728]: I1205 11:30:03.358604 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z7v8q\" (UniqueName: \"kubernetes.io/projected/d7d32022-fd6b-4ecd-83d4-5b628f19e413-kube-api-access-z7v8q\") pod \"placement-554ccc7b5b-l2c6v\" (UID: \"d7d32022-fd6b-4ecd-83d4-5b628f19e413\") " pod="openstack/placement-554ccc7b5b-l2c6v" Dec 05 11:30:03 crc kubenswrapper[4728]: I1205 11:30:03.358693 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d7d32022-fd6b-4ecd-83d4-5b628f19e413-internal-tls-certs\") pod \"placement-554ccc7b5b-l2c6v\" (UID: \"d7d32022-fd6b-4ecd-83d4-5b628f19e413\") " pod="openstack/placement-554ccc7b5b-l2c6v" Dec 05 11:30:03 crc kubenswrapper[4728]: I1205 11:30:03.358764 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7d32022-fd6b-4ecd-83d4-5b628f19e413-combined-ca-bundle\") pod \"placement-554ccc7b5b-l2c6v\" (UID: \"d7d32022-fd6b-4ecd-83d4-5b628f19e413\") " pod="openstack/placement-554ccc7b5b-l2c6v" Dec 05 11:30:03 crc kubenswrapper[4728]: I1205 11:30:03.358808 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d7d32022-fd6b-4ecd-83d4-5b628f19e413-public-tls-certs\") pod \"placement-554ccc7b5b-l2c6v\" (UID: \"d7d32022-fd6b-4ecd-83d4-5b628f19e413\") " pod="openstack/placement-554ccc7b5b-l2c6v" Dec 05 11:30:03 crc kubenswrapper[4728]: I1205 11:30:03.358852 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7d32022-fd6b-4ecd-83d4-5b628f19e413-config-data\") pod \"placement-554ccc7b5b-l2c6v\" (UID: \"d7d32022-fd6b-4ecd-83d4-5b628f19e413\") " pod="openstack/placement-554ccc7b5b-l2c6v" Dec 05 11:30:03 crc kubenswrapper[4728]: I1205 11:30:03.361691 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d7d32022-fd6b-4ecd-83d4-5b628f19e413-logs\") pod \"placement-554ccc7b5b-l2c6v\" (UID: \"d7d32022-fd6b-4ecd-83d4-5b628f19e413\") " pod="openstack/placement-554ccc7b5b-l2c6v" Dec 05 11:30:03 crc kubenswrapper[4728]: I1205 11:30:03.364135 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/d7d32022-fd6b-4ecd-83d4-5b628f19e413-scripts\") pod \"placement-554ccc7b5b-l2c6v\" (UID: \"d7d32022-fd6b-4ecd-83d4-5b628f19e413\") " pod="openstack/placement-554ccc7b5b-l2c6v" Dec 05 11:30:03 crc kubenswrapper[4728]: I1205 11:30:03.364290 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7d32022-fd6b-4ecd-83d4-5b628f19e413-config-data\") pod \"placement-554ccc7b5b-l2c6v\" (UID: \"d7d32022-fd6b-4ecd-83d4-5b628f19e413\") " pod="openstack/placement-554ccc7b5b-l2c6v" Dec 05 11:30:03 crc kubenswrapper[4728]: I1205 11:30:03.364960 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d7d32022-fd6b-4ecd-83d4-5b628f19e413-internal-tls-certs\") pod \"placement-554ccc7b5b-l2c6v\" (UID: \"d7d32022-fd6b-4ecd-83d4-5b628f19e413\") " pod="openstack/placement-554ccc7b5b-l2c6v" Dec 05 11:30:03 crc kubenswrapper[4728]: I1205 11:30:03.368389 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7d32022-fd6b-4ecd-83d4-5b628f19e413-combined-ca-bundle\") pod \"placement-554ccc7b5b-l2c6v\" (UID: \"d7d32022-fd6b-4ecd-83d4-5b628f19e413\") " pod="openstack/placement-554ccc7b5b-l2c6v" Dec 05 11:30:03 crc kubenswrapper[4728]: I1205 11:30:03.369230 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d7d32022-fd6b-4ecd-83d4-5b628f19e413-public-tls-certs\") pod \"placement-554ccc7b5b-l2c6v\" (UID: \"d7d32022-fd6b-4ecd-83d4-5b628f19e413\") " pod="openstack/placement-554ccc7b5b-l2c6v" Dec 05 11:30:03 crc kubenswrapper[4728]: I1205 11:30:03.378388 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z7v8q\" (UniqueName: \"kubernetes.io/projected/d7d32022-fd6b-4ecd-83d4-5b628f19e413-kube-api-access-z7v8q\") pod \"placement-554ccc7b5b-l2c6v\" (UID: \"d7d32022-fd6b-4ecd-83d4-5b628f19e413\") " pod="openstack/placement-554ccc7b5b-l2c6v" Dec 05 11:30:03 crc kubenswrapper[4728]: I1205 11:30:03.414836 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-554ccc7b5b-l2c6v" Dec 05 11:30:03 crc kubenswrapper[4728]: I1205 11:30:03.511243 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415570-hv7p2" Dec 05 11:30:03 crc kubenswrapper[4728]: I1205 11:30:03.677463 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9773570d-8d08-4620-8911-40e5ecd02aef-secret-volume\") pod \"9773570d-8d08-4620-8911-40e5ecd02aef\" (UID: \"9773570d-8d08-4620-8911-40e5ecd02aef\") " Dec 05 11:30:03 crc kubenswrapper[4728]: I1205 11:30:03.677858 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9773570d-8d08-4620-8911-40e5ecd02aef-config-volume\") pod \"9773570d-8d08-4620-8911-40e5ecd02aef\" (UID: \"9773570d-8d08-4620-8911-40e5ecd02aef\") " Dec 05 11:30:03 crc kubenswrapper[4728]: I1205 11:30:03.677936 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vlbnp\" (UniqueName: \"kubernetes.io/projected/9773570d-8d08-4620-8911-40e5ecd02aef-kube-api-access-vlbnp\") pod \"9773570d-8d08-4620-8911-40e5ecd02aef\" (UID: \"9773570d-8d08-4620-8911-40e5ecd02aef\") " Dec 05 11:30:03 crc kubenswrapper[4728]: I1205 11:30:03.678833 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9773570d-8d08-4620-8911-40e5ecd02aef-config-volume" (OuterVolumeSpecName: "config-volume") pod "9773570d-8d08-4620-8911-40e5ecd02aef" (UID: "9773570d-8d08-4620-8911-40e5ecd02aef"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:30:03 crc kubenswrapper[4728]: I1205 11:30:03.684157 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9773570d-8d08-4620-8911-40e5ecd02aef-kube-api-access-vlbnp" (OuterVolumeSpecName: "kube-api-access-vlbnp") pod "9773570d-8d08-4620-8911-40e5ecd02aef" (UID: "9773570d-8d08-4620-8911-40e5ecd02aef"). InnerVolumeSpecName "kube-api-access-vlbnp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:30:03 crc kubenswrapper[4728]: I1205 11:30:03.687243 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9773570d-8d08-4620-8911-40e5ecd02aef-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "9773570d-8d08-4620-8911-40e5ecd02aef" (UID: "9773570d-8d08-4620-8911-40e5ecd02aef"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:30:03 crc kubenswrapper[4728]: I1205 11:30:03.780572 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vlbnp\" (UniqueName: \"kubernetes.io/projected/9773570d-8d08-4620-8911-40e5ecd02aef-kube-api-access-vlbnp\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:03 crc kubenswrapper[4728]: I1205 11:30:03.780614 4728 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9773570d-8d08-4620-8911-40e5ecd02aef-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:03 crc kubenswrapper[4728]: I1205 11:30:03.780628 4728 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9773570d-8d08-4620-8911-40e5ecd02aef-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:03 crc kubenswrapper[4728]: I1205 11:30:03.999058 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415570-hv7p2" event={"ID":"9773570d-8d08-4620-8911-40e5ecd02aef","Type":"ContainerDied","Data":"346f9a36328354364325b889c5508c5976affb6a89fcde2ac46c68ad216d417c"} Dec 05 11:30:03 crc kubenswrapper[4728]: I1205 11:30:03.999134 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="346f9a36328354364325b889c5508c5976affb6a89fcde2ac46c68ad216d417c" Dec 05 11:30:03 crc kubenswrapper[4728]: I1205 11:30:03.999071 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415570-hv7p2" Dec 05 11:30:04 crc kubenswrapper[4728]: I1205 11:30:04.006551 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-554ccc7b5b-l2c6v"] Dec 05 11:30:04 crc kubenswrapper[4728]: I1205 11:30:04.013061 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c8ca2efe-4d75-46f8-9878-e4f76b8e85f4","Type":"ContainerStarted","Data":"20438e594ebc0eb329f8e3f19d1a4a98d23077a99329c007cc5b01d1469da530"} Dec 05 11:30:04 crc kubenswrapper[4728]: I1205 11:30:04.292004 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-55f844cf75-6pvrj" Dec 05 11:30:04 crc kubenswrapper[4728]: I1205 11:30:04.368587 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-snxbb"] Dec 05 11:30:04 crc kubenswrapper[4728]: I1205 11:30:04.368811 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-785d8bcb8c-snxbb" podUID="5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1" containerName="dnsmasq-dns" containerID="cri-o://f9d3a8d0d5662b90a3cbb65dc1936090e52994eecc9ea5045d9019598ed88cdd" gracePeriod=10 Dec 05 11:30:05 crc kubenswrapper[4728]: I1205 11:30:05.022707 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-554ccc7b5b-l2c6v" event={"ID":"d7d32022-fd6b-4ecd-83d4-5b628f19e413","Type":"ContainerStarted","Data":"83d864b505ac9b6e1b958820637a6ca5e2019c2a5ce8fe6dacfb2bd02d27cbb7"} Dec 05 11:30:05 crc kubenswrapper[4728]: I1205 11:30:05.117460 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-27gkv" Dec 05 11:30:05 crc kubenswrapper[4728]: I1205 11:30:05.202436 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1a984c5b-a1ab-47ab-9acd-998c05072ea1-credential-keys\") pod \"1a984c5b-a1ab-47ab-9acd-998c05072ea1\" (UID: \"1a984c5b-a1ab-47ab-9acd-998c05072ea1\") " Dec 05 11:30:05 crc kubenswrapper[4728]: I1205 11:30:05.202555 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1a984c5b-a1ab-47ab-9acd-998c05072ea1-fernet-keys\") pod \"1a984c5b-a1ab-47ab-9acd-998c05072ea1\" (UID: \"1a984c5b-a1ab-47ab-9acd-998c05072ea1\") " Dec 05 11:30:05 crc kubenswrapper[4728]: I1205 11:30:05.202584 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1a984c5b-a1ab-47ab-9acd-998c05072ea1-scripts\") pod \"1a984c5b-a1ab-47ab-9acd-998c05072ea1\" (UID: \"1a984c5b-a1ab-47ab-9acd-998c05072ea1\") " Dec 05 11:30:05 crc kubenswrapper[4728]: I1205 11:30:05.202610 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1a984c5b-a1ab-47ab-9acd-998c05072ea1-config-data\") pod \"1a984c5b-a1ab-47ab-9acd-998c05072ea1\" (UID: \"1a984c5b-a1ab-47ab-9acd-998c05072ea1\") " Dec 05 11:30:05 crc kubenswrapper[4728]: I1205 11:30:05.202643 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a984c5b-a1ab-47ab-9acd-998c05072ea1-combined-ca-bundle\") pod \"1a984c5b-a1ab-47ab-9acd-998c05072ea1\" (UID: \"1a984c5b-a1ab-47ab-9acd-998c05072ea1\") " Dec 05 11:30:05 crc kubenswrapper[4728]: I1205 11:30:05.202838 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6jwpb\" (UniqueName: \"kubernetes.io/projected/1a984c5b-a1ab-47ab-9acd-998c05072ea1-kube-api-access-6jwpb\") pod \"1a984c5b-a1ab-47ab-9acd-998c05072ea1\" (UID: \"1a984c5b-a1ab-47ab-9acd-998c05072ea1\") " Dec 05 11:30:05 crc kubenswrapper[4728]: I1205 11:30:05.211194 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a984c5b-a1ab-47ab-9acd-998c05072ea1-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "1a984c5b-a1ab-47ab-9acd-998c05072ea1" (UID: "1a984c5b-a1ab-47ab-9acd-998c05072ea1"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:30:05 crc kubenswrapper[4728]: I1205 11:30:05.213458 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a984c5b-a1ab-47ab-9acd-998c05072ea1-kube-api-access-6jwpb" (OuterVolumeSpecName: "kube-api-access-6jwpb") pod "1a984c5b-a1ab-47ab-9acd-998c05072ea1" (UID: "1a984c5b-a1ab-47ab-9acd-998c05072ea1"). InnerVolumeSpecName "kube-api-access-6jwpb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:30:05 crc kubenswrapper[4728]: I1205 11:30:05.214761 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a984c5b-a1ab-47ab-9acd-998c05072ea1-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "1a984c5b-a1ab-47ab-9acd-998c05072ea1" (UID: "1a984c5b-a1ab-47ab-9acd-998c05072ea1"). InnerVolumeSpecName "credential-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:30:05 crc kubenswrapper[4728]: I1205 11:30:05.229197 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a984c5b-a1ab-47ab-9acd-998c05072ea1-scripts" (OuterVolumeSpecName: "scripts") pod "1a984c5b-a1ab-47ab-9acd-998c05072ea1" (UID: "1a984c5b-a1ab-47ab-9acd-998c05072ea1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:30:05 crc kubenswrapper[4728]: I1205 11:30:05.247539 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a984c5b-a1ab-47ab-9acd-998c05072ea1-config-data" (OuterVolumeSpecName: "config-data") pod "1a984c5b-a1ab-47ab-9acd-998c05072ea1" (UID: "1a984c5b-a1ab-47ab-9acd-998c05072ea1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:30:05 crc kubenswrapper[4728]: I1205 11:30:05.247972 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a984c5b-a1ab-47ab-9acd-998c05072ea1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1a984c5b-a1ab-47ab-9acd-998c05072ea1" (UID: "1a984c5b-a1ab-47ab-9acd-998c05072ea1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:30:05 crc kubenswrapper[4728]: I1205 11:30:05.304581 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6jwpb\" (UniqueName: \"kubernetes.io/projected/1a984c5b-a1ab-47ab-9acd-998c05072ea1-kube-api-access-6jwpb\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:05 crc kubenswrapper[4728]: I1205 11:30:05.304613 4728 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1a984c5b-a1ab-47ab-9acd-998c05072ea1-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:05 crc kubenswrapper[4728]: I1205 11:30:05.304622 4728 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1a984c5b-a1ab-47ab-9acd-998c05072ea1-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:05 crc kubenswrapper[4728]: I1205 11:30:05.304630 4728 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1a984c5b-a1ab-47ab-9acd-998c05072ea1-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:05 crc kubenswrapper[4728]: I1205 11:30:05.304638 4728 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1a984c5b-a1ab-47ab-9acd-998c05072ea1-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:05 crc kubenswrapper[4728]: I1205 11:30:05.304645 4728 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a984c5b-a1ab-47ab-9acd-998c05072ea1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:06 crc kubenswrapper[4728]: I1205 11:30:06.033244 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-27gkv" event={"ID":"1a984c5b-a1ab-47ab-9acd-998c05072ea1","Type":"ContainerDied","Data":"9b759cbb9491aaceff3419835e53693db3728ea44a6e24ed7238930655d04bac"} Dec 05 11:30:06 crc kubenswrapper[4728]: I1205 11:30:06.033354 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-27gkv" Dec 05 11:30:06 crc kubenswrapper[4728]: I1205 11:30:06.033543 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9b759cbb9491aaceff3419835e53693db3728ea44a6e24ed7238930655d04bac" Dec 05 11:30:06 crc kubenswrapper[4728]: I1205 11:30:06.035349 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-554ccc7b5b-l2c6v" event={"ID":"d7d32022-fd6b-4ecd-83d4-5b628f19e413","Type":"ContainerStarted","Data":"ffe07fa82aad656f1cd50ff1799f4baa3c99bebcd049d098d2853761ab078aa5"} Dec 05 11:30:06 crc kubenswrapper[4728]: I1205 11:30:06.037343 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c8ca2efe-4d75-46f8-9878-e4f76b8e85f4","Type":"ContainerStarted","Data":"4355c7f8f8c64082fee96e7eaee0da584c497118fd2025378398d3ccddf09e3d"} Dec 05 11:30:06 crc kubenswrapper[4728]: I1205 11:30:06.040315 4728 generic.go:334] "Generic (PLEG): container finished" podID="5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1" containerID="f9d3a8d0d5662b90a3cbb65dc1936090e52994eecc9ea5045d9019598ed88cdd" exitCode=0 Dec 05 11:30:06 crc kubenswrapper[4728]: I1205 11:30:06.040341 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-snxbb" event={"ID":"5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1","Type":"ContainerDied","Data":"f9d3a8d0d5662b90a3cbb65dc1936090e52994eecc9ea5045d9019598ed88cdd"} Dec 05 11:30:06 crc kubenswrapper[4728]: I1205 11:30:06.080722 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=6.080698408 podStartE2EDuration="6.080698408s" podCreationTimestamp="2025-12-05 11:30:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:30:06.069317596 +0000 UTC m=+1340.211440309" watchObservedRunningTime="2025-12-05 11:30:06.080698408 +0000 UTC m=+1340.222821121" Dec 05 11:30:06 crc kubenswrapper[4728]: I1205 11:30:06.242911 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-6fbd7fcb8c-kr5v8"] Dec 05 11:30:06 crc kubenswrapper[4728]: E1205 11:30:06.243346 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9773570d-8d08-4620-8911-40e5ecd02aef" containerName="collect-profiles" Dec 05 11:30:06 crc kubenswrapper[4728]: I1205 11:30:06.243368 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="9773570d-8d08-4620-8911-40e5ecd02aef" containerName="collect-profiles" Dec 05 11:30:06 crc kubenswrapper[4728]: E1205 11:30:06.243389 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a984c5b-a1ab-47ab-9acd-998c05072ea1" containerName="keystone-bootstrap" Dec 05 11:30:06 crc kubenswrapper[4728]: I1205 11:30:06.243395 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a984c5b-a1ab-47ab-9acd-998c05072ea1" containerName="keystone-bootstrap" Dec 05 11:30:06 crc kubenswrapper[4728]: I1205 11:30:06.243559 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="9773570d-8d08-4620-8911-40e5ecd02aef" containerName="collect-profiles" Dec 05 11:30:06 crc kubenswrapper[4728]: I1205 11:30:06.243586 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a984c5b-a1ab-47ab-9acd-998c05072ea1" containerName="keystone-bootstrap" Dec 05 11:30:06 crc kubenswrapper[4728]: I1205 11:30:06.244177 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-6fbd7fcb8c-kr5v8" Dec 05 11:30:06 crc kubenswrapper[4728]: I1205 11:30:06.248327 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 05 11:30:06 crc kubenswrapper[4728]: I1205 11:30:06.248634 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 05 11:30:06 crc kubenswrapper[4728]: I1205 11:30:06.253148 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-6fbd7fcb8c-kr5v8"] Dec 05 11:30:06 crc kubenswrapper[4728]: I1205 11:30:06.256415 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Dec 05 11:30:06 crc kubenswrapper[4728]: I1205 11:30:06.256518 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Dec 05 11:30:06 crc kubenswrapper[4728]: I1205 11:30:06.256412 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-rz6qv" Dec 05 11:30:06 crc kubenswrapper[4728]: I1205 11:30:06.256629 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 05 11:30:06 crc kubenswrapper[4728]: I1205 11:30:06.324675 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/523f920a-f4d7-46db-8066-ad0c4f8d22d5-combined-ca-bundle\") pod \"keystone-6fbd7fcb8c-kr5v8\" (UID: \"523f920a-f4d7-46db-8066-ad0c4f8d22d5\") " pod="openstack/keystone-6fbd7fcb8c-kr5v8" Dec 05 11:30:06 crc kubenswrapper[4728]: I1205 11:30:06.324884 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/523f920a-f4d7-46db-8066-ad0c4f8d22d5-internal-tls-certs\") pod \"keystone-6fbd7fcb8c-kr5v8\" (UID: \"523f920a-f4d7-46db-8066-ad0c4f8d22d5\") " pod="openstack/keystone-6fbd7fcb8c-kr5v8" Dec 05 11:30:06 crc kubenswrapper[4728]: I1205 11:30:06.324938 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/523f920a-f4d7-46db-8066-ad0c4f8d22d5-config-data\") pod \"keystone-6fbd7fcb8c-kr5v8\" (UID: \"523f920a-f4d7-46db-8066-ad0c4f8d22d5\") " pod="openstack/keystone-6fbd7fcb8c-kr5v8" Dec 05 11:30:06 crc kubenswrapper[4728]: I1205 11:30:06.324992 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-68hds\" (UniqueName: \"kubernetes.io/projected/523f920a-f4d7-46db-8066-ad0c4f8d22d5-kube-api-access-68hds\") pod \"keystone-6fbd7fcb8c-kr5v8\" (UID: \"523f920a-f4d7-46db-8066-ad0c4f8d22d5\") " pod="openstack/keystone-6fbd7fcb8c-kr5v8" Dec 05 11:30:06 crc kubenswrapper[4728]: I1205 11:30:06.325016 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/523f920a-f4d7-46db-8066-ad0c4f8d22d5-fernet-keys\") pod \"keystone-6fbd7fcb8c-kr5v8\" (UID: \"523f920a-f4d7-46db-8066-ad0c4f8d22d5\") " pod="openstack/keystone-6fbd7fcb8c-kr5v8" Dec 05 11:30:06 crc kubenswrapper[4728]: I1205 11:30:06.325068 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/523f920a-f4d7-46db-8066-ad0c4f8d22d5-public-tls-certs\") pod \"keystone-6fbd7fcb8c-kr5v8\" 
(UID: \"523f920a-f4d7-46db-8066-ad0c4f8d22d5\") " pod="openstack/keystone-6fbd7fcb8c-kr5v8" Dec 05 11:30:06 crc kubenswrapper[4728]: I1205 11:30:06.325160 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/523f920a-f4d7-46db-8066-ad0c4f8d22d5-scripts\") pod \"keystone-6fbd7fcb8c-kr5v8\" (UID: \"523f920a-f4d7-46db-8066-ad0c4f8d22d5\") " pod="openstack/keystone-6fbd7fcb8c-kr5v8" Dec 05 11:30:06 crc kubenswrapper[4728]: I1205 11:30:06.325267 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/523f920a-f4d7-46db-8066-ad0c4f8d22d5-credential-keys\") pod \"keystone-6fbd7fcb8c-kr5v8\" (UID: \"523f920a-f4d7-46db-8066-ad0c4f8d22d5\") " pod="openstack/keystone-6fbd7fcb8c-kr5v8" Dec 05 11:30:06 crc kubenswrapper[4728]: I1205 11:30:06.429725 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/523f920a-f4d7-46db-8066-ad0c4f8d22d5-config-data\") pod \"keystone-6fbd7fcb8c-kr5v8\" (UID: \"523f920a-f4d7-46db-8066-ad0c4f8d22d5\") " pod="openstack/keystone-6fbd7fcb8c-kr5v8" Dec 05 11:30:06 crc kubenswrapper[4728]: I1205 11:30:06.429776 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-68hds\" (UniqueName: \"kubernetes.io/projected/523f920a-f4d7-46db-8066-ad0c4f8d22d5-kube-api-access-68hds\") pod \"keystone-6fbd7fcb8c-kr5v8\" (UID: \"523f920a-f4d7-46db-8066-ad0c4f8d22d5\") " pod="openstack/keystone-6fbd7fcb8c-kr5v8" Dec 05 11:30:06 crc kubenswrapper[4728]: I1205 11:30:06.429806 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/523f920a-f4d7-46db-8066-ad0c4f8d22d5-fernet-keys\") pod \"keystone-6fbd7fcb8c-kr5v8\" (UID: \"523f920a-f4d7-46db-8066-ad0c4f8d22d5\") " pod="openstack/keystone-6fbd7fcb8c-kr5v8" Dec 05 11:30:06 crc kubenswrapper[4728]: I1205 11:30:06.429824 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/523f920a-f4d7-46db-8066-ad0c4f8d22d5-public-tls-certs\") pod \"keystone-6fbd7fcb8c-kr5v8\" (UID: \"523f920a-f4d7-46db-8066-ad0c4f8d22d5\") " pod="openstack/keystone-6fbd7fcb8c-kr5v8" Dec 05 11:30:06 crc kubenswrapper[4728]: I1205 11:30:06.429860 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/523f920a-f4d7-46db-8066-ad0c4f8d22d5-scripts\") pod \"keystone-6fbd7fcb8c-kr5v8\" (UID: \"523f920a-f4d7-46db-8066-ad0c4f8d22d5\") " pod="openstack/keystone-6fbd7fcb8c-kr5v8" Dec 05 11:30:06 crc kubenswrapper[4728]: I1205 11:30:06.429909 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/523f920a-f4d7-46db-8066-ad0c4f8d22d5-credential-keys\") pod \"keystone-6fbd7fcb8c-kr5v8\" (UID: \"523f920a-f4d7-46db-8066-ad0c4f8d22d5\") " pod="openstack/keystone-6fbd7fcb8c-kr5v8" Dec 05 11:30:06 crc kubenswrapper[4728]: I1205 11:30:06.429963 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/523f920a-f4d7-46db-8066-ad0c4f8d22d5-combined-ca-bundle\") pod \"keystone-6fbd7fcb8c-kr5v8\" (UID: \"523f920a-f4d7-46db-8066-ad0c4f8d22d5\") " pod="openstack/keystone-6fbd7fcb8c-kr5v8" Dec 05 
11:30:06 crc kubenswrapper[4728]: I1205 11:30:06.430009 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/523f920a-f4d7-46db-8066-ad0c4f8d22d5-internal-tls-certs\") pod \"keystone-6fbd7fcb8c-kr5v8\" (UID: \"523f920a-f4d7-46db-8066-ad0c4f8d22d5\") " pod="openstack/keystone-6fbd7fcb8c-kr5v8" Dec 05 11:30:06 crc kubenswrapper[4728]: I1205 11:30:06.437330 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/523f920a-f4d7-46db-8066-ad0c4f8d22d5-internal-tls-certs\") pod \"keystone-6fbd7fcb8c-kr5v8\" (UID: \"523f920a-f4d7-46db-8066-ad0c4f8d22d5\") " pod="openstack/keystone-6fbd7fcb8c-kr5v8" Dec 05 11:30:06 crc kubenswrapper[4728]: I1205 11:30:06.441509 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/523f920a-f4d7-46db-8066-ad0c4f8d22d5-credential-keys\") pod \"keystone-6fbd7fcb8c-kr5v8\" (UID: \"523f920a-f4d7-46db-8066-ad0c4f8d22d5\") " pod="openstack/keystone-6fbd7fcb8c-kr5v8" Dec 05 11:30:06 crc kubenswrapper[4728]: I1205 11:30:06.444191 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/523f920a-f4d7-46db-8066-ad0c4f8d22d5-public-tls-certs\") pod \"keystone-6fbd7fcb8c-kr5v8\" (UID: \"523f920a-f4d7-46db-8066-ad0c4f8d22d5\") " pod="openstack/keystone-6fbd7fcb8c-kr5v8" Dec 05 11:30:06 crc kubenswrapper[4728]: I1205 11:30:06.444211 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/523f920a-f4d7-46db-8066-ad0c4f8d22d5-fernet-keys\") pod \"keystone-6fbd7fcb8c-kr5v8\" (UID: \"523f920a-f4d7-46db-8066-ad0c4f8d22d5\") " pod="openstack/keystone-6fbd7fcb8c-kr5v8" Dec 05 11:30:06 crc kubenswrapper[4728]: I1205 11:30:06.444496 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/523f920a-f4d7-46db-8066-ad0c4f8d22d5-combined-ca-bundle\") pod \"keystone-6fbd7fcb8c-kr5v8\" (UID: \"523f920a-f4d7-46db-8066-ad0c4f8d22d5\") " pod="openstack/keystone-6fbd7fcb8c-kr5v8" Dec 05 11:30:06 crc kubenswrapper[4728]: I1205 11:30:06.450130 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/523f920a-f4d7-46db-8066-ad0c4f8d22d5-scripts\") pod \"keystone-6fbd7fcb8c-kr5v8\" (UID: \"523f920a-f4d7-46db-8066-ad0c4f8d22d5\") " pod="openstack/keystone-6fbd7fcb8c-kr5v8" Dec 05 11:30:06 crc kubenswrapper[4728]: I1205 11:30:06.467477 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/523f920a-f4d7-46db-8066-ad0c4f8d22d5-config-data\") pod \"keystone-6fbd7fcb8c-kr5v8\" (UID: \"523f920a-f4d7-46db-8066-ad0c4f8d22d5\") " pod="openstack/keystone-6fbd7fcb8c-kr5v8" Dec 05 11:30:06 crc kubenswrapper[4728]: I1205 11:30:06.473438 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-68hds\" (UniqueName: \"kubernetes.io/projected/523f920a-f4d7-46db-8066-ad0c4f8d22d5-kube-api-access-68hds\") pod \"keystone-6fbd7fcb8c-kr5v8\" (UID: \"523f920a-f4d7-46db-8066-ad0c4f8d22d5\") " pod="openstack/keystone-6fbd7fcb8c-kr5v8" Dec 05 11:30:06 crc kubenswrapper[4728]: I1205 11:30:06.574179 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-6fbd7fcb8c-kr5v8" Dec 05 11:30:08 crc kubenswrapper[4728]: I1205 11:30:08.352346 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-snxbb" Dec 05 11:30:08 crc kubenswrapper[4728]: I1205 11:30:08.469092 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1-dns-svc\") pod \"5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1\" (UID: \"5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1\") " Dec 05 11:30:08 crc kubenswrapper[4728]: I1205 11:30:08.469144 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1-dns-swift-storage-0\") pod \"5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1\" (UID: \"5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1\") " Dec 05 11:30:08 crc kubenswrapper[4728]: I1205 11:30:08.469184 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1-ovsdbserver-sb\") pod \"5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1\" (UID: \"5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1\") " Dec 05 11:30:08 crc kubenswrapper[4728]: I1205 11:30:08.469276 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1-ovsdbserver-nb\") pod \"5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1\" (UID: \"5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1\") " Dec 05 11:30:08 crc kubenswrapper[4728]: I1205 11:30:08.469324 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zxbbm\" (UniqueName: \"kubernetes.io/projected/5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1-kube-api-access-zxbbm\") pod \"5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1\" (UID: \"5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1\") " Dec 05 11:30:08 crc kubenswrapper[4728]: I1205 11:30:08.469366 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1-config\") pod \"5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1\" (UID: \"5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1\") " Dec 05 11:30:08 crc kubenswrapper[4728]: I1205 11:30:08.475172 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1-kube-api-access-zxbbm" (OuterVolumeSpecName: "kube-api-access-zxbbm") pod "5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1" (UID: "5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1"). InnerVolumeSpecName "kube-api-access-zxbbm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:30:08 crc kubenswrapper[4728]: I1205 11:30:08.513537 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1" (UID: "5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:30:08 crc kubenswrapper[4728]: I1205 11:30:08.521602 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1-config" (OuterVolumeSpecName: "config") pod "5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1" (UID: "5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:30:08 crc kubenswrapper[4728]: I1205 11:30:08.523490 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1" (UID: "5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:30:08 crc kubenswrapper[4728]: I1205 11:30:08.528736 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1" (UID: "5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:30:08 crc kubenswrapper[4728]: I1205 11:30:08.536359 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1" (UID: "5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:30:08 crc kubenswrapper[4728]: I1205 11:30:08.571015 4728 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:08 crc kubenswrapper[4728]: I1205 11:30:08.571049 4728 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:08 crc kubenswrapper[4728]: I1205 11:30:08.571062 4728 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:08 crc kubenswrapper[4728]: I1205 11:30:08.571071 4728 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:08 crc kubenswrapper[4728]: I1205 11:30:08.571079 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zxbbm\" (UniqueName: \"kubernetes.io/projected/5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1-kube-api-access-zxbbm\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:08 crc kubenswrapper[4728]: I1205 11:30:08.571088 4728 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:09 crc kubenswrapper[4728]: I1205 11:30:09.073503 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/dnsmasq-dns-785d8bcb8c-snxbb" event={"ID":"5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1","Type":"ContainerDied","Data":"47a847aa13d9f64fae9288c7af7228c7d178ca8b00602da601fd32e0d5b55c66"} Dec 05 11:30:09 crc kubenswrapper[4728]: I1205 11:30:09.073552 4728 scope.go:117] "RemoveContainer" containerID="f9d3a8d0d5662b90a3cbb65dc1936090e52994eecc9ea5045d9019598ed88cdd" Dec 05 11:30:09 crc kubenswrapper[4728]: I1205 11:30:09.073647 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-snxbb" Dec 05 11:30:09 crc kubenswrapper[4728]: I1205 11:30:09.120912 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-snxbb"] Dec 05 11:30:09 crc kubenswrapper[4728]: I1205 11:30:09.128029 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-snxbb"] Dec 05 11:30:10 crc kubenswrapper[4728]: I1205 11:30:10.362733 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1" path="/var/lib/kubelet/pods/5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1/volumes" Dec 05 11:30:10 crc kubenswrapper[4728]: I1205 11:30:10.363729 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 05 11:30:10 crc kubenswrapper[4728]: I1205 11:30:10.363764 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 05 11:30:10 crc kubenswrapper[4728]: I1205 11:30:10.385230 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 05 11:30:10 crc kubenswrapper[4728]: I1205 11:30:10.400583 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 05 11:30:10 crc kubenswrapper[4728]: I1205 11:30:10.500837 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 05 11:30:10 crc kubenswrapper[4728]: I1205 11:30:10.501196 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 05 11:30:10 crc kubenswrapper[4728]: I1205 11:30:10.532023 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 05 11:30:10 crc kubenswrapper[4728]: I1205 11:30:10.552564 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 05 11:30:10 crc kubenswrapper[4728]: I1205 11:30:10.554743 4728 scope.go:117] "RemoveContainer" containerID="681f11ee3ecda221214b21a459c08c722f866b98a91ed4b958121041d9cad81b" Dec 05 11:30:10 crc kubenswrapper[4728]: I1205 11:30:10.898942 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-5d7bdb6c68-cfbgd" Dec 05 11:30:10 crc kubenswrapper[4728]: I1205 11:30:10.900552 4728 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-5d7bdb6c68-cfbgd" podUID="7c54bdd7-0427-403e-a33d-2d52ec56a7fc" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.151:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.151:8443: connect: connection refused" Dec 05 11:30:10 crc kubenswrapper[4728]: I1205 11:30:10.910951 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-5d7bdb6c68-cfbgd" Dec 05 11:30:11 crc 
kubenswrapper[4728]: I1205 11:30:11.102042 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 05 11:30:11 crc kubenswrapper[4728]: I1205 11:30:11.102250 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 05 11:30:11 crc kubenswrapper[4728]: I1205 11:30:11.102260 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 05 11:30:11 crc kubenswrapper[4728]: I1205 11:30:11.102355 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 05 11:30:11 crc kubenswrapper[4728]: I1205 11:30:11.136733 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-6fbd7fcb8c-kr5v8"] Dec 05 11:30:11 crc kubenswrapper[4728]: I1205 11:30:11.217986 4728 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-7755888bd8-shzsv" podUID="841ca27f-0486-413e-975b-4f51b008883a" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.152:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.152:8443: connect: connection refused" Dec 05 11:30:12 crc kubenswrapper[4728]: I1205 11:30:12.118199 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-6fbd7fcb8c-kr5v8" event={"ID":"523f920a-f4d7-46db-8066-ad0c4f8d22d5","Type":"ContainerStarted","Data":"d41839b2dade227a00f6d67f4fa2c1cd29d6cf3527ebe8f84de9bb96a7fbe723"} Dec 05 11:30:12 crc kubenswrapper[4728]: I1205 11:30:12.118532 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-6fbd7fcb8c-kr5v8" event={"ID":"523f920a-f4d7-46db-8066-ad0c4f8d22d5","Type":"ContainerStarted","Data":"bbf49c5a8c5085cc7ea0f560b49c8a9223a7f14d71ef96db6381ba5b247f7baf"} Dec 05 11:30:12 crc kubenswrapper[4728]: I1205 11:30:12.118582 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-6fbd7fcb8c-kr5v8" Dec 05 11:30:12 crc kubenswrapper[4728]: I1205 11:30:12.127842 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dbac4a04-13e4-43e5-a221-3faec9e9fac7","Type":"ContainerStarted","Data":"ca5f63c44cd9390148f011388c22d0f4f7300e9187fe267780b4aa9909991f53"} Dec 05 11:30:12 crc kubenswrapper[4728]: I1205 11:30:12.130683 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-554ccc7b5b-l2c6v" event={"ID":"d7d32022-fd6b-4ecd-83d4-5b628f19e413","Type":"ContainerStarted","Data":"c554240c5a226d8685a63cf74bf41976eb42628ca2b8e10a3e1d4a18ac1c7f64"} Dec 05 11:30:12 crc kubenswrapper[4728]: I1205 11:30:12.130745 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-554ccc7b5b-l2c6v" Dec 05 11:30:12 crc kubenswrapper[4728]: I1205 11:30:12.130760 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-554ccc7b5b-l2c6v" Dec 05 11:30:12 crc kubenswrapper[4728]: I1205 11:30:12.132328 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-fm2df" event={"ID":"555e531f-162f-4097-ba36-53b6ddedd6d8","Type":"ContainerStarted","Data":"cb05f6ad3f1272fa578ed38c9b80c038868355b7da27c9a78c973da8868ebff3"} Dec 05 11:30:12 crc kubenswrapper[4728]: I1205 11:30:12.158303 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-6fbd7fcb8c-kr5v8" podStartSLOduration=6.158283337 
podStartE2EDuration="6.158283337s" podCreationTimestamp="2025-12-05 11:30:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:30:12.154362613 +0000 UTC m=+1346.296485306" watchObservedRunningTime="2025-12-05 11:30:12.158283337 +0000 UTC m=+1346.300406040" Dec 05 11:30:12 crc kubenswrapper[4728]: I1205 11:30:12.180054 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-554ccc7b5b-l2c6v" podStartSLOduration=9.180039443 podStartE2EDuration="9.180039443s" podCreationTimestamp="2025-12-05 11:30:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:30:12.178954485 +0000 UTC m=+1346.321077188" watchObservedRunningTime="2025-12-05 11:30:12.180039443 +0000 UTC m=+1346.322162136" Dec 05 11:30:12 crc kubenswrapper[4728]: I1205 11:30:12.199820 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-fm2df" podStartSLOduration=3.346193791 podStartE2EDuration="56.199788606s" podCreationTimestamp="2025-12-05 11:29:16 +0000 UTC" firstStartedPulling="2025-12-05 11:29:17.957703264 +0000 UTC m=+1292.099825957" lastFinishedPulling="2025-12-05 11:30:10.811298079 +0000 UTC m=+1344.953420772" observedRunningTime="2025-12-05 11:30:12.197832075 +0000 UTC m=+1346.339954768" watchObservedRunningTime="2025-12-05 11:30:12.199788606 +0000 UTC m=+1346.341911299" Dec 05 11:30:13 crc kubenswrapper[4728]: I1205 11:30:13.148696 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-zbbv6" event={"ID":"537c7276-c2c9-4427-9b2b-5e835e3bc2d7","Type":"ContainerStarted","Data":"8ef8f65ad94c428c88af66587e1fa39c7c9a3e85582c08325973ab5fff66cdaa"} Dec 05 11:30:13 crc kubenswrapper[4728]: I1205 11:30:13.159006 4728 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 11:30:13 crc kubenswrapper[4728]: I1205 11:30:13.159166 4728 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 11:30:13 crc kubenswrapper[4728]: I1205 11:30:13.159001 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-dtppr" event={"ID":"875f6746-18ef-483c-bbb4-80d7dbe4b1a1","Type":"ContainerStarted","Data":"abf8576f46b6ec4882d872d4cf1e3cf3478459a0b6aacba5878baf11162ff85b"} Dec 05 11:30:13 crc kubenswrapper[4728]: I1205 11:30:13.159164 4728 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 11:30:13 crc kubenswrapper[4728]: I1205 11:30:13.159304 4728 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 11:30:13 crc kubenswrapper[4728]: I1205 11:30:13.169270 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-db-sync-zbbv6" podStartSLOduration=4.470451461 podStartE2EDuration="57.169252295s" podCreationTimestamp="2025-12-05 11:29:16 +0000 UTC" firstStartedPulling="2025-12-05 11:29:18.180032123 +0000 UTC m=+1292.322154816" lastFinishedPulling="2025-12-05 11:30:10.878832957 +0000 UTC m=+1345.020955650" observedRunningTime="2025-12-05 11:30:13.162329642 +0000 UTC m=+1347.304452345" watchObservedRunningTime="2025-12-05 11:30:13.169252295 +0000 UTC m=+1347.311374988" Dec 05 11:30:13 crc kubenswrapper[4728]: I1205 11:30:13.191037 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-dtppr" podStartSLOduration=4.126769178 
podStartE2EDuration="57.191022522s" podCreationTimestamp="2025-12-05 11:29:16 +0000 UTC" firstStartedPulling="2025-12-05 11:29:17.648860744 +0000 UTC m=+1291.790983437" lastFinishedPulling="2025-12-05 11:30:10.713114088 +0000 UTC m=+1344.855236781" observedRunningTime="2025-12-05 11:30:13.17999335 +0000 UTC m=+1347.322116033" watchObservedRunningTime="2025-12-05 11:30:13.191022522 +0000 UTC m=+1347.333145205" Dec 05 11:30:13 crc kubenswrapper[4728]: I1205 11:30:13.396663 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 05 11:30:13 crc kubenswrapper[4728]: I1205 11:30:13.416530 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 05 11:30:13 crc kubenswrapper[4728]: I1205 11:30:13.530454 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 05 11:30:13 crc kubenswrapper[4728]: I1205 11:30:13.674564 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 05 11:30:13 crc kubenswrapper[4728]: I1205 11:30:13.794678 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-554ccc7b5b-l2c6v" Dec 05 11:30:16 crc kubenswrapper[4728]: I1205 11:30:16.191401 4728 generic.go:334] "Generic (PLEG): container finished" podID="555e531f-162f-4097-ba36-53b6ddedd6d8" containerID="cb05f6ad3f1272fa578ed38c9b80c038868355b7da27c9a78c973da8868ebff3" exitCode=0 Dec 05 11:30:16 crc kubenswrapper[4728]: I1205 11:30:16.191482 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-fm2df" event={"ID":"555e531f-162f-4097-ba36-53b6ddedd6d8","Type":"ContainerDied","Data":"cb05f6ad3f1272fa578ed38c9b80c038868355b7da27c9a78c973da8868ebff3"} Dec 05 11:30:20 crc kubenswrapper[4728]: I1205 11:30:20.893871 4728 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-5d7bdb6c68-cfbgd" podUID="7c54bdd7-0427-403e-a33d-2d52ec56a7fc" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.151:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.151:8443: connect: connection refused" Dec 05 11:30:20 crc kubenswrapper[4728]: I1205 11:30:20.946819 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-fm2df" Dec 05 11:30:21 crc kubenswrapper[4728]: I1205 11:30:21.080248 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/555e531f-162f-4097-ba36-53b6ddedd6d8-db-sync-config-data\") pod \"555e531f-162f-4097-ba36-53b6ddedd6d8\" (UID: \"555e531f-162f-4097-ba36-53b6ddedd6d8\") " Dec 05 11:30:21 crc kubenswrapper[4728]: I1205 11:30:21.080497 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/555e531f-162f-4097-ba36-53b6ddedd6d8-combined-ca-bundle\") pod \"555e531f-162f-4097-ba36-53b6ddedd6d8\" (UID: \"555e531f-162f-4097-ba36-53b6ddedd6d8\") " Dec 05 11:30:21 crc kubenswrapper[4728]: I1205 11:30:21.080670 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qkvlk\" (UniqueName: \"kubernetes.io/projected/555e531f-162f-4097-ba36-53b6ddedd6d8-kube-api-access-qkvlk\") pod \"555e531f-162f-4097-ba36-53b6ddedd6d8\" (UID: \"555e531f-162f-4097-ba36-53b6ddedd6d8\") " Dec 05 11:30:21 crc kubenswrapper[4728]: I1205 11:30:21.089899 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/555e531f-162f-4097-ba36-53b6ddedd6d8-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "555e531f-162f-4097-ba36-53b6ddedd6d8" (UID: "555e531f-162f-4097-ba36-53b6ddedd6d8"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:30:21 crc kubenswrapper[4728]: I1205 11:30:21.090004 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/555e531f-162f-4097-ba36-53b6ddedd6d8-kube-api-access-qkvlk" (OuterVolumeSpecName: "kube-api-access-qkvlk") pod "555e531f-162f-4097-ba36-53b6ddedd6d8" (UID: "555e531f-162f-4097-ba36-53b6ddedd6d8"). InnerVolumeSpecName "kube-api-access-qkvlk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:30:21 crc kubenswrapper[4728]: I1205 11:30:21.131320 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/555e531f-162f-4097-ba36-53b6ddedd6d8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "555e531f-162f-4097-ba36-53b6ddedd6d8" (UID: "555e531f-162f-4097-ba36-53b6ddedd6d8"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:30:21 crc kubenswrapper[4728]: I1205 11:30:21.182510 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qkvlk\" (UniqueName: \"kubernetes.io/projected/555e531f-162f-4097-ba36-53b6ddedd6d8-kube-api-access-qkvlk\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:21 crc kubenswrapper[4728]: I1205 11:30:21.182809 4728 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/555e531f-162f-4097-ba36-53b6ddedd6d8-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:21 crc kubenswrapper[4728]: I1205 11:30:21.182944 4728 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/555e531f-162f-4097-ba36-53b6ddedd6d8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:21 crc kubenswrapper[4728]: I1205 11:30:21.199920 4728 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-7755888bd8-shzsv" podUID="841ca27f-0486-413e-975b-4f51b008883a" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.152:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.152:8443: connect: connection refused" Dec 05 11:30:21 crc kubenswrapper[4728]: I1205 11:30:21.245934 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-fm2df" event={"ID":"555e531f-162f-4097-ba36-53b6ddedd6d8","Type":"ContainerDied","Data":"6ceb4b0f4ba9e2a2ac68ef8bd37e7619389cc8d5de9c516c436892269fa4c530"} Dec 05 11:30:21 crc kubenswrapper[4728]: I1205 11:30:21.245967 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6ceb4b0f4ba9e2a2ac68ef8bd37e7619389cc8d5de9c516c436892269fa4c530" Dec 05 11:30:21 crc kubenswrapper[4728]: I1205 11:30:21.246013 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-fm2df" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.197560 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-7f87fccb55-qhksr"] Dec 05 11:30:22 crc kubenswrapper[4728]: E1205 11:30:22.198227 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="555e531f-162f-4097-ba36-53b6ddedd6d8" containerName="barbican-db-sync" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.198240 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="555e531f-162f-4097-ba36-53b6ddedd6d8" containerName="barbican-db-sync" Dec 05 11:30:22 crc kubenswrapper[4728]: E1205 11:30:22.198254 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1" containerName="dnsmasq-dns" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.198260 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1" containerName="dnsmasq-dns" Dec 05 11:30:22 crc kubenswrapper[4728]: E1205 11:30:22.198287 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1" containerName="init" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.198293 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1" containerName="init" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.198439 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a8bd9ce-fdd1-4d9e-a9c8-5ecf3b7ba7b1" containerName="dnsmasq-dns" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.198454 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="555e531f-162f-4097-ba36-53b6ddedd6d8" containerName="barbican-db-sync" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.199334 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-7f87fccb55-qhksr" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.202565 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.202757 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.202898 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-vxzsz" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.235177 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-7f87fccb55-qhksr"] Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.266146 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-76974b5d9d-khzwj"] Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.266667 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="dbac4a04-13e4-43e5-a221-3faec9e9fac7" containerName="ceilometer-central-agent" containerID="cri-o://0e7730282b9724a9f5acf4019d6f17483c6d6fbe81837bb2dda34843238a1e8d" gracePeriod=30 Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.266822 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="dbac4a04-13e4-43e5-a221-3faec9e9fac7" containerName="proxy-httpd" containerID="cri-o://e02df1173130b328a095083b29f0fa2593844521e5456035d1c5fea3103e91ec" gracePeriod=30 Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.266876 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="dbac4a04-13e4-43e5-a221-3faec9e9fac7" containerName="sg-core" containerID="cri-o://ca5f63c44cd9390148f011388c22d0f4f7300e9187fe267780b4aa9909991f53" gracePeriod=30 Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.266918 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="dbac4a04-13e4-43e5-a221-3faec9e9fac7" containerName="ceilometer-notification-agent" containerID="cri-o://900224b2993b650f331db1f1a449c72f71c4d12c706517d76a9bd5bdbc624302" gracePeriod=30 Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.267569 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dbac4a04-13e4-43e5-a221-3faec9e9fac7","Type":"ContainerStarted","Data":"e02df1173130b328a095083b29f0fa2593844521e5456035d1c5fea3103e91ec"} Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.267651 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-76974b5d9d-khzwj" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.268096 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.270770 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.281386 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-76974b5d9d-khzwj"] Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.303829 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b11e743-92a0-4601-8cdf-935c3cc54a55-combined-ca-bundle\") pod \"barbican-worker-7f87fccb55-qhksr\" (UID: \"2b11e743-92a0-4601-8cdf-935c3cc54a55\") " pod="openstack/barbican-worker-7f87fccb55-qhksr" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.303906 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b11e743-92a0-4601-8cdf-935c3cc54a55-config-data\") pod \"barbican-worker-7f87fccb55-qhksr\" (UID: \"2b11e743-92a0-4601-8cdf-935c3cc54a55\") " pod="openstack/barbican-worker-7f87fccb55-qhksr" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.303930 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lbljv\" (UniqueName: \"kubernetes.io/projected/2b11e743-92a0-4601-8cdf-935c3cc54a55-kube-api-access-lbljv\") pod \"barbican-worker-7f87fccb55-qhksr\" (UID: \"2b11e743-92a0-4601-8cdf-935c3cc54a55\") " pod="openstack/barbican-worker-7f87fccb55-qhksr" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.304016 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2b11e743-92a0-4601-8cdf-935c3cc54a55-config-data-custom\") pod \"barbican-worker-7f87fccb55-qhksr\" (UID: \"2b11e743-92a0-4601-8cdf-935c3cc54a55\") " pod="openstack/barbican-worker-7f87fccb55-qhksr" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.304072 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2b11e743-92a0-4601-8cdf-935c3cc54a55-logs\") pod \"barbican-worker-7f87fccb55-qhksr\" (UID: \"2b11e743-92a0-4601-8cdf-935c3cc54a55\") " pod="openstack/barbican-worker-7f87fccb55-qhksr" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.333968 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-kj9k8"] Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.335479 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-85ff748b95-kj9k8" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.394305 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-kj9k8"] Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.409703 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/37ac3cc7-ebdb-4ba9-97b6-c62b482b49c2-config-data-custom\") pod \"barbican-keystone-listener-76974b5d9d-khzwj\" (UID: \"37ac3cc7-ebdb-4ba9-97b6-c62b482b49c2\") " pod="openstack/barbican-keystone-listener-76974b5d9d-khzwj" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.409784 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b11e743-92a0-4601-8cdf-935c3cc54a55-combined-ca-bundle\") pod \"barbican-worker-7f87fccb55-qhksr\" (UID: \"2b11e743-92a0-4601-8cdf-935c3cc54a55\") " pod="openstack/barbican-worker-7f87fccb55-qhksr" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.409832 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b11e743-92a0-4601-8cdf-935c3cc54a55-config-data\") pod \"barbican-worker-7f87fccb55-qhksr\" (UID: \"2b11e743-92a0-4601-8cdf-935c3cc54a55\") " pod="openstack/barbican-worker-7f87fccb55-qhksr" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.409847 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lbljv\" (UniqueName: \"kubernetes.io/projected/2b11e743-92a0-4601-8cdf-935c3cc54a55-kube-api-access-lbljv\") pod \"barbican-worker-7f87fccb55-qhksr\" (UID: \"2b11e743-92a0-4601-8cdf-935c3cc54a55\") " pod="openstack/barbican-worker-7f87fccb55-qhksr" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.409879 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f8w5w\" (UniqueName: \"kubernetes.io/projected/37ac3cc7-ebdb-4ba9-97b6-c62b482b49c2-kube-api-access-f8w5w\") pod \"barbican-keystone-listener-76974b5d9d-khzwj\" (UID: \"37ac3cc7-ebdb-4ba9-97b6-c62b482b49c2\") " pod="openstack/barbican-keystone-listener-76974b5d9d-khzwj" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.409910 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37ac3cc7-ebdb-4ba9-97b6-c62b482b49c2-config-data\") pod \"barbican-keystone-listener-76974b5d9d-khzwj\" (UID: \"37ac3cc7-ebdb-4ba9-97b6-c62b482b49c2\") " pod="openstack/barbican-keystone-listener-76974b5d9d-khzwj" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.409946 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2b11e743-92a0-4601-8cdf-935c3cc54a55-config-data-custom\") pod \"barbican-worker-7f87fccb55-qhksr\" (UID: \"2b11e743-92a0-4601-8cdf-935c3cc54a55\") " pod="openstack/barbican-worker-7f87fccb55-qhksr" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.409966 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37ac3cc7-ebdb-4ba9-97b6-c62b482b49c2-combined-ca-bundle\") pod \"barbican-keystone-listener-76974b5d9d-khzwj\" (UID: \"37ac3cc7-ebdb-4ba9-97b6-c62b482b49c2\") " 
pod="openstack/barbican-keystone-listener-76974b5d9d-khzwj" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.409991 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/37ac3cc7-ebdb-4ba9-97b6-c62b482b49c2-logs\") pod \"barbican-keystone-listener-76974b5d9d-khzwj\" (UID: \"37ac3cc7-ebdb-4ba9-97b6-c62b482b49c2\") " pod="openstack/barbican-keystone-listener-76974b5d9d-khzwj" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.410016 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2b11e743-92a0-4601-8cdf-935c3cc54a55-logs\") pod \"barbican-worker-7f87fccb55-qhksr\" (UID: \"2b11e743-92a0-4601-8cdf-935c3cc54a55\") " pod="openstack/barbican-worker-7f87fccb55-qhksr" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.410436 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2b11e743-92a0-4601-8cdf-935c3cc54a55-logs\") pod \"barbican-worker-7f87fccb55-qhksr\" (UID: \"2b11e743-92a0-4601-8cdf-935c3cc54a55\") " pod="openstack/barbican-worker-7f87fccb55-qhksr" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.423276 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2b11e743-92a0-4601-8cdf-935c3cc54a55-config-data-custom\") pod \"barbican-worker-7f87fccb55-qhksr\" (UID: \"2b11e743-92a0-4601-8cdf-935c3cc54a55\") " pod="openstack/barbican-worker-7f87fccb55-qhksr" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.425228 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b11e743-92a0-4601-8cdf-935c3cc54a55-combined-ca-bundle\") pod \"barbican-worker-7f87fccb55-qhksr\" (UID: \"2b11e743-92a0-4601-8cdf-935c3cc54a55\") " pod="openstack/barbican-worker-7f87fccb55-qhksr" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.432263 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b11e743-92a0-4601-8cdf-935c3cc54a55-config-data\") pod \"barbican-worker-7f87fccb55-qhksr\" (UID: \"2b11e743-92a0-4601-8cdf-935c3cc54a55\") " pod="openstack/barbican-worker-7f87fccb55-qhksr" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.447051 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.367227949 podStartE2EDuration="1m6.447014139s" podCreationTimestamp="2025-12-05 11:29:16 +0000 UTC" firstStartedPulling="2025-12-05 11:29:18.108153869 +0000 UTC m=+1292.250276562" lastFinishedPulling="2025-12-05 11:30:21.187940049 +0000 UTC m=+1355.330062752" observedRunningTime="2025-12-05 11:30:22.428162399 +0000 UTC m=+1356.570285092" watchObservedRunningTime="2025-12-05 11:30:22.447014139 +0000 UTC m=+1356.589136822" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.459048 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lbljv\" (UniqueName: \"kubernetes.io/projected/2b11e743-92a0-4601-8cdf-935c3cc54a55-kube-api-access-lbljv\") pod \"barbican-worker-7f87fccb55-qhksr\" (UID: \"2b11e743-92a0-4601-8cdf-935c3cc54a55\") " pod="openstack/barbican-worker-7f87fccb55-qhksr" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.512934 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6cd833b3-79f5-41f4-8d37-07b81c016e23-dns-svc\") pod \"dnsmasq-dns-85ff748b95-kj9k8\" (UID: \"6cd833b3-79f5-41f4-8d37-07b81c016e23\") " pod="openstack/dnsmasq-dns-85ff748b95-kj9k8" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.512981 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/37ac3cc7-ebdb-4ba9-97b6-c62b482b49c2-config-data-custom\") pod \"barbican-keystone-listener-76974b5d9d-khzwj\" (UID: \"37ac3cc7-ebdb-4ba9-97b6-c62b482b49c2\") " pod="openstack/barbican-keystone-listener-76974b5d9d-khzwj" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.513009 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6cd833b3-79f5-41f4-8d37-07b81c016e23-dns-swift-storage-0\") pod \"dnsmasq-dns-85ff748b95-kj9k8\" (UID: \"6cd833b3-79f5-41f4-8d37-07b81c016e23\") " pod="openstack/dnsmasq-dns-85ff748b95-kj9k8" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.513027 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6cd833b3-79f5-41f4-8d37-07b81c016e23-config\") pod \"dnsmasq-dns-85ff748b95-kj9k8\" (UID: \"6cd833b3-79f5-41f4-8d37-07b81c016e23\") " pod="openstack/dnsmasq-dns-85ff748b95-kj9k8" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.513089 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qnstr\" (UniqueName: \"kubernetes.io/projected/6cd833b3-79f5-41f4-8d37-07b81c016e23-kube-api-access-qnstr\") pod \"dnsmasq-dns-85ff748b95-kj9k8\" (UID: \"6cd833b3-79f5-41f4-8d37-07b81c016e23\") " pod="openstack/dnsmasq-dns-85ff748b95-kj9k8" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.513151 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6cd833b3-79f5-41f4-8d37-07b81c016e23-ovsdbserver-sb\") pod \"dnsmasq-dns-85ff748b95-kj9k8\" (UID: \"6cd833b3-79f5-41f4-8d37-07b81c016e23\") " pod="openstack/dnsmasq-dns-85ff748b95-kj9k8" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.513174 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f8w5w\" (UniqueName: \"kubernetes.io/projected/37ac3cc7-ebdb-4ba9-97b6-c62b482b49c2-kube-api-access-f8w5w\") pod \"barbican-keystone-listener-76974b5d9d-khzwj\" (UID: \"37ac3cc7-ebdb-4ba9-97b6-c62b482b49c2\") " pod="openstack/barbican-keystone-listener-76974b5d9d-khzwj" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.513198 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6cd833b3-79f5-41f4-8d37-07b81c016e23-ovsdbserver-nb\") pod \"dnsmasq-dns-85ff748b95-kj9k8\" (UID: \"6cd833b3-79f5-41f4-8d37-07b81c016e23\") " pod="openstack/dnsmasq-dns-85ff748b95-kj9k8" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.513220 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37ac3cc7-ebdb-4ba9-97b6-c62b482b49c2-config-data\") pod \"barbican-keystone-listener-76974b5d9d-khzwj\" (UID: \"37ac3cc7-ebdb-4ba9-97b6-c62b482b49c2\") " 
pod="openstack/barbican-keystone-listener-76974b5d9d-khzwj" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.513285 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37ac3cc7-ebdb-4ba9-97b6-c62b482b49c2-combined-ca-bundle\") pod \"barbican-keystone-listener-76974b5d9d-khzwj\" (UID: \"37ac3cc7-ebdb-4ba9-97b6-c62b482b49c2\") " pod="openstack/barbican-keystone-listener-76974b5d9d-khzwj" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.513310 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/37ac3cc7-ebdb-4ba9-97b6-c62b482b49c2-logs\") pod \"barbican-keystone-listener-76974b5d9d-khzwj\" (UID: \"37ac3cc7-ebdb-4ba9-97b6-c62b482b49c2\") " pod="openstack/barbican-keystone-listener-76974b5d9d-khzwj" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.513741 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/37ac3cc7-ebdb-4ba9-97b6-c62b482b49c2-logs\") pod \"barbican-keystone-listener-76974b5d9d-khzwj\" (UID: \"37ac3cc7-ebdb-4ba9-97b6-c62b482b49c2\") " pod="openstack/barbican-keystone-listener-76974b5d9d-khzwj" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.518328 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/37ac3cc7-ebdb-4ba9-97b6-c62b482b49c2-config-data-custom\") pod \"barbican-keystone-listener-76974b5d9d-khzwj\" (UID: \"37ac3cc7-ebdb-4ba9-97b6-c62b482b49c2\") " pod="openstack/barbican-keystone-listener-76974b5d9d-khzwj" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.519471 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37ac3cc7-ebdb-4ba9-97b6-c62b482b49c2-config-data\") pod \"barbican-keystone-listener-76974b5d9d-khzwj\" (UID: \"37ac3cc7-ebdb-4ba9-97b6-c62b482b49c2\") " pod="openstack/barbican-keystone-listener-76974b5d9d-khzwj" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.525949 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-7f87fccb55-qhksr" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.542468 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37ac3cc7-ebdb-4ba9-97b6-c62b482b49c2-combined-ca-bundle\") pod \"barbican-keystone-listener-76974b5d9d-khzwj\" (UID: \"37ac3cc7-ebdb-4ba9-97b6-c62b482b49c2\") " pod="openstack/barbican-keystone-listener-76974b5d9d-khzwj" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.549386 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f8w5w\" (UniqueName: \"kubernetes.io/projected/37ac3cc7-ebdb-4ba9-97b6-c62b482b49c2-kube-api-access-f8w5w\") pod \"barbican-keystone-listener-76974b5d9d-khzwj\" (UID: \"37ac3cc7-ebdb-4ba9-97b6-c62b482b49c2\") " pod="openstack/barbican-keystone-listener-76974b5d9d-khzwj" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.585724 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-76974b5d9d-khzwj" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.614847 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6cd833b3-79f5-41f4-8d37-07b81c016e23-dns-svc\") pod \"dnsmasq-dns-85ff748b95-kj9k8\" (UID: \"6cd833b3-79f5-41f4-8d37-07b81c016e23\") " pod="openstack/dnsmasq-dns-85ff748b95-kj9k8" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.614908 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6cd833b3-79f5-41f4-8d37-07b81c016e23-dns-swift-storage-0\") pod \"dnsmasq-dns-85ff748b95-kj9k8\" (UID: \"6cd833b3-79f5-41f4-8d37-07b81c016e23\") " pod="openstack/dnsmasq-dns-85ff748b95-kj9k8" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.614927 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6cd833b3-79f5-41f4-8d37-07b81c016e23-config\") pod \"dnsmasq-dns-85ff748b95-kj9k8\" (UID: \"6cd833b3-79f5-41f4-8d37-07b81c016e23\") " pod="openstack/dnsmasq-dns-85ff748b95-kj9k8" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.614967 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qnstr\" (UniqueName: \"kubernetes.io/projected/6cd833b3-79f5-41f4-8d37-07b81c016e23-kube-api-access-qnstr\") pod \"dnsmasq-dns-85ff748b95-kj9k8\" (UID: \"6cd833b3-79f5-41f4-8d37-07b81c016e23\") " pod="openstack/dnsmasq-dns-85ff748b95-kj9k8" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.615005 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6cd833b3-79f5-41f4-8d37-07b81c016e23-ovsdbserver-sb\") pod \"dnsmasq-dns-85ff748b95-kj9k8\" (UID: \"6cd833b3-79f5-41f4-8d37-07b81c016e23\") " pod="openstack/dnsmasq-dns-85ff748b95-kj9k8" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.615034 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6cd833b3-79f5-41f4-8d37-07b81c016e23-ovsdbserver-nb\") pod \"dnsmasq-dns-85ff748b95-kj9k8\" (UID: \"6cd833b3-79f5-41f4-8d37-07b81c016e23\") " pod="openstack/dnsmasq-dns-85ff748b95-kj9k8" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.615742 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6cd833b3-79f5-41f4-8d37-07b81c016e23-dns-svc\") pod \"dnsmasq-dns-85ff748b95-kj9k8\" (UID: \"6cd833b3-79f5-41f4-8d37-07b81c016e23\") " pod="openstack/dnsmasq-dns-85ff748b95-kj9k8" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.616582 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6cd833b3-79f5-41f4-8d37-07b81c016e23-dns-swift-storage-0\") pod \"dnsmasq-dns-85ff748b95-kj9k8\" (UID: \"6cd833b3-79f5-41f4-8d37-07b81c016e23\") " pod="openstack/dnsmasq-dns-85ff748b95-kj9k8" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.616684 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6cd833b3-79f5-41f4-8d37-07b81c016e23-ovsdbserver-sb\") pod \"dnsmasq-dns-85ff748b95-kj9k8\" (UID: \"6cd833b3-79f5-41f4-8d37-07b81c016e23\") " pod="openstack/dnsmasq-dns-85ff748b95-kj9k8" 
Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.616738 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6cd833b3-79f5-41f4-8d37-07b81c016e23-ovsdbserver-nb\") pod \"dnsmasq-dns-85ff748b95-kj9k8\" (UID: \"6cd833b3-79f5-41f4-8d37-07b81c016e23\") " pod="openstack/dnsmasq-dns-85ff748b95-kj9k8" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.617228 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6cd833b3-79f5-41f4-8d37-07b81c016e23-config\") pod \"dnsmasq-dns-85ff748b95-kj9k8\" (UID: \"6cd833b3-79f5-41f4-8d37-07b81c016e23\") " pod="openstack/dnsmasq-dns-85ff748b95-kj9k8" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.642507 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qnstr\" (UniqueName: \"kubernetes.io/projected/6cd833b3-79f5-41f4-8d37-07b81c016e23-kube-api-access-qnstr\") pod \"dnsmasq-dns-85ff748b95-kj9k8\" (UID: \"6cd833b3-79f5-41f4-8d37-07b81c016e23\") " pod="openstack/dnsmasq-dns-85ff748b95-kj9k8" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.680560 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-85ff748b95-kj9k8" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.690539 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-6d6466fdb6-7fhbh"] Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.711662 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6d6466fdb6-7fhbh" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.714992 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6d6466fdb6-7fhbh"] Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.722314 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.822396 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7faef5d-1c9d-4820-a97e-22486f06f9ce-config-data\") pod \"barbican-api-6d6466fdb6-7fhbh\" (UID: \"f7faef5d-1c9d-4820-a97e-22486f06f9ce\") " pod="openstack/barbican-api-6d6466fdb6-7fhbh" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.822886 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7faef5d-1c9d-4820-a97e-22486f06f9ce-combined-ca-bundle\") pod \"barbican-api-6d6466fdb6-7fhbh\" (UID: \"f7faef5d-1c9d-4820-a97e-22486f06f9ce\") " pod="openstack/barbican-api-6d6466fdb6-7fhbh" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.822978 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f7faef5d-1c9d-4820-a97e-22486f06f9ce-config-data-custom\") pod \"barbican-api-6d6466fdb6-7fhbh\" (UID: \"f7faef5d-1c9d-4820-a97e-22486f06f9ce\") " pod="openstack/barbican-api-6d6466fdb6-7fhbh" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.823039 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wrbdm\" (UniqueName: \"kubernetes.io/projected/f7faef5d-1c9d-4820-a97e-22486f06f9ce-kube-api-access-wrbdm\") pod 
\"barbican-api-6d6466fdb6-7fhbh\" (UID: \"f7faef5d-1c9d-4820-a97e-22486f06f9ce\") " pod="openstack/barbican-api-6d6466fdb6-7fhbh" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.823072 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f7faef5d-1c9d-4820-a97e-22486f06f9ce-logs\") pod \"barbican-api-6d6466fdb6-7fhbh\" (UID: \"f7faef5d-1c9d-4820-a97e-22486f06f9ce\") " pod="openstack/barbican-api-6d6466fdb6-7fhbh" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.925072 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7faef5d-1c9d-4820-a97e-22486f06f9ce-config-data\") pod \"barbican-api-6d6466fdb6-7fhbh\" (UID: \"f7faef5d-1c9d-4820-a97e-22486f06f9ce\") " pod="openstack/barbican-api-6d6466fdb6-7fhbh" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.925176 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7faef5d-1c9d-4820-a97e-22486f06f9ce-combined-ca-bundle\") pod \"barbican-api-6d6466fdb6-7fhbh\" (UID: \"f7faef5d-1c9d-4820-a97e-22486f06f9ce\") " pod="openstack/barbican-api-6d6466fdb6-7fhbh" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.925244 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f7faef5d-1c9d-4820-a97e-22486f06f9ce-config-data-custom\") pod \"barbican-api-6d6466fdb6-7fhbh\" (UID: \"f7faef5d-1c9d-4820-a97e-22486f06f9ce\") " pod="openstack/barbican-api-6d6466fdb6-7fhbh" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.925301 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wrbdm\" (UniqueName: \"kubernetes.io/projected/f7faef5d-1c9d-4820-a97e-22486f06f9ce-kube-api-access-wrbdm\") pod \"barbican-api-6d6466fdb6-7fhbh\" (UID: \"f7faef5d-1c9d-4820-a97e-22486f06f9ce\") " pod="openstack/barbican-api-6d6466fdb6-7fhbh" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.925323 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f7faef5d-1c9d-4820-a97e-22486f06f9ce-logs\") pod \"barbican-api-6d6466fdb6-7fhbh\" (UID: \"f7faef5d-1c9d-4820-a97e-22486f06f9ce\") " pod="openstack/barbican-api-6d6466fdb6-7fhbh" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.925818 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f7faef5d-1c9d-4820-a97e-22486f06f9ce-logs\") pod \"barbican-api-6d6466fdb6-7fhbh\" (UID: \"f7faef5d-1c9d-4820-a97e-22486f06f9ce\") " pod="openstack/barbican-api-6d6466fdb6-7fhbh" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.931579 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7faef5d-1c9d-4820-a97e-22486f06f9ce-combined-ca-bundle\") pod \"barbican-api-6d6466fdb6-7fhbh\" (UID: \"f7faef5d-1c9d-4820-a97e-22486f06f9ce\") " pod="openstack/barbican-api-6d6466fdb6-7fhbh" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.931878 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f7faef5d-1c9d-4820-a97e-22486f06f9ce-config-data-custom\") pod \"barbican-api-6d6466fdb6-7fhbh\" (UID: 
\"f7faef5d-1c9d-4820-a97e-22486f06f9ce\") " pod="openstack/barbican-api-6d6466fdb6-7fhbh" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.933232 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7faef5d-1c9d-4820-a97e-22486f06f9ce-config-data\") pod \"barbican-api-6d6466fdb6-7fhbh\" (UID: \"f7faef5d-1c9d-4820-a97e-22486f06f9ce\") " pod="openstack/barbican-api-6d6466fdb6-7fhbh" Dec 05 11:30:22 crc kubenswrapper[4728]: I1205 11:30:22.942317 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wrbdm\" (UniqueName: \"kubernetes.io/projected/f7faef5d-1c9d-4820-a97e-22486f06f9ce-kube-api-access-wrbdm\") pod \"barbican-api-6d6466fdb6-7fhbh\" (UID: \"f7faef5d-1c9d-4820-a97e-22486f06f9ce\") " pod="openstack/barbican-api-6d6466fdb6-7fhbh" Dec 05 11:30:23 crc kubenswrapper[4728]: I1205 11:30:23.047078 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6d6466fdb6-7fhbh" Dec 05 11:30:23 crc kubenswrapper[4728]: I1205 11:30:23.079822 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-7f87fccb55-qhksr"] Dec 05 11:30:23 crc kubenswrapper[4728]: I1205 11:30:23.106838 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-kj9k8"] Dec 05 11:30:23 crc kubenswrapper[4728]: W1205 11:30:23.108238 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2b11e743_92a0_4601_8cdf_935c3cc54a55.slice/crio-009f70e544c356d7fa2862125d22910ad1d4b14d34a1b195207ce06c0b2f74eb WatchSource:0}: Error finding container 009f70e544c356d7fa2862125d22910ad1d4b14d34a1b195207ce06c0b2f74eb: Status 404 returned error can't find the container with id 009f70e544c356d7fa2862125d22910ad1d4b14d34a1b195207ce06c0b2f74eb Dec 05 11:30:23 crc kubenswrapper[4728]: W1205 11:30:23.126177 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6cd833b3_79f5_41f4_8d37_07b81c016e23.slice/crio-0d0465bd2e607458d468524a8b64606b1ad81542c15034a64b424e491b680b42 WatchSource:0}: Error finding container 0d0465bd2e607458d468524a8b64606b1ad81542c15034a64b424e491b680b42: Status 404 returned error can't find the container with id 0d0465bd2e607458d468524a8b64606b1ad81542c15034a64b424e491b680b42 Dec 05 11:30:23 crc kubenswrapper[4728]: I1205 11:30:23.232002 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-76974b5d9d-khzwj"] Dec 05 11:30:23 crc kubenswrapper[4728]: I1205 11:30:23.277324 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-7f87fccb55-qhksr" event={"ID":"2b11e743-92a0-4601-8cdf-935c3cc54a55","Type":"ContainerStarted","Data":"009f70e544c356d7fa2862125d22910ad1d4b14d34a1b195207ce06c0b2f74eb"} Dec 05 11:30:23 crc kubenswrapper[4728]: I1205 11:30:23.282423 4728 generic.go:334] "Generic (PLEG): container finished" podID="875f6746-18ef-483c-bbb4-80d7dbe4b1a1" containerID="abf8576f46b6ec4882d872d4cf1e3cf3478459a0b6aacba5878baf11162ff85b" exitCode=0 Dec 05 11:30:23 crc kubenswrapper[4728]: I1205 11:30:23.282486 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-dtppr" event={"ID":"875f6746-18ef-483c-bbb4-80d7dbe4b1a1","Type":"ContainerDied","Data":"abf8576f46b6ec4882d872d4cf1e3cf3478459a0b6aacba5878baf11162ff85b"} Dec 05 11:30:23 crc 
kubenswrapper[4728]: I1205 11:30:23.286720 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85ff748b95-kj9k8" event={"ID":"6cd833b3-79f5-41f4-8d37-07b81c016e23","Type":"ContainerStarted","Data":"0d0465bd2e607458d468524a8b64606b1ad81542c15034a64b424e491b680b42"} Dec 05 11:30:23 crc kubenswrapper[4728]: I1205 11:30:23.290468 4728 generic.go:334] "Generic (PLEG): container finished" podID="dbac4a04-13e4-43e5-a221-3faec9e9fac7" containerID="e02df1173130b328a095083b29f0fa2593844521e5456035d1c5fea3103e91ec" exitCode=0 Dec 05 11:30:23 crc kubenswrapper[4728]: I1205 11:30:23.290493 4728 generic.go:334] "Generic (PLEG): container finished" podID="dbac4a04-13e4-43e5-a221-3faec9e9fac7" containerID="ca5f63c44cd9390148f011388c22d0f4f7300e9187fe267780b4aa9909991f53" exitCode=2 Dec 05 11:30:23 crc kubenswrapper[4728]: I1205 11:30:23.290501 4728 generic.go:334] "Generic (PLEG): container finished" podID="dbac4a04-13e4-43e5-a221-3faec9e9fac7" containerID="0e7730282b9724a9f5acf4019d6f17483c6d6fbe81837bb2dda34843238a1e8d" exitCode=0 Dec 05 11:30:23 crc kubenswrapper[4728]: I1205 11:30:23.290522 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dbac4a04-13e4-43e5-a221-3faec9e9fac7","Type":"ContainerDied","Data":"e02df1173130b328a095083b29f0fa2593844521e5456035d1c5fea3103e91ec"} Dec 05 11:30:23 crc kubenswrapper[4728]: I1205 11:30:23.290547 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dbac4a04-13e4-43e5-a221-3faec9e9fac7","Type":"ContainerDied","Data":"ca5f63c44cd9390148f011388c22d0f4f7300e9187fe267780b4aa9909991f53"} Dec 05 11:30:23 crc kubenswrapper[4728]: I1205 11:30:23.290556 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dbac4a04-13e4-43e5-a221-3faec9e9fac7","Type":"ContainerDied","Data":"0e7730282b9724a9f5acf4019d6f17483c6d6fbe81837bb2dda34843238a1e8d"} Dec 05 11:30:23 crc kubenswrapper[4728]: W1205 11:30:23.345955 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod37ac3cc7_ebdb_4ba9_97b6_c62b482b49c2.slice/crio-7dfc36a1ce63eb795456f6e09c6acd3a23420c315d8bf803de54f68bd04b932c WatchSource:0}: Error finding container 7dfc36a1ce63eb795456f6e09c6acd3a23420c315d8bf803de54f68bd04b932c: Status 404 returned error can't find the container with id 7dfc36a1ce63eb795456f6e09c6acd3a23420c315d8bf803de54f68bd04b932c Dec 05 11:30:23 crc kubenswrapper[4728]: I1205 11:30:23.550652 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6d6466fdb6-7fhbh"] Dec 05 11:30:24 crc kubenswrapper[4728]: I1205 11:30:24.299688 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6d6466fdb6-7fhbh" event={"ID":"f7faef5d-1c9d-4820-a97e-22486f06f9ce","Type":"ContainerStarted","Data":"652f04c78f6b6b46fd9dc695423eec317fb5cf018fd85fca0d78016a62b747f5"} Dec 05 11:30:24 crc kubenswrapper[4728]: I1205 11:30:24.300014 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6d6466fdb6-7fhbh" event={"ID":"f7faef5d-1c9d-4820-a97e-22486f06f9ce","Type":"ContainerStarted","Data":"a17cdf0d6d59bb47b7c1c9d368a73bee178275f3c086c669fa938b563c01a115"} Dec 05 11:30:24 crc kubenswrapper[4728]: I1205 11:30:24.300033 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6d6466fdb6-7fhbh" Dec 05 11:30:24 crc kubenswrapper[4728]: I1205 11:30:24.300042 4728 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6d6466fdb6-7fhbh" event={"ID":"f7faef5d-1c9d-4820-a97e-22486f06f9ce","Type":"ContainerStarted","Data":"683b15eb91e6f9c6665a00ff0da11e1556269d4472e164c17192b2d73e2e256d"} Dec 05 11:30:24 crc kubenswrapper[4728]: I1205 11:30:24.300057 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6d6466fdb6-7fhbh" Dec 05 11:30:24 crc kubenswrapper[4728]: I1205 11:30:24.301478 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-76974b5d9d-khzwj" event={"ID":"37ac3cc7-ebdb-4ba9-97b6-c62b482b49c2","Type":"ContainerStarted","Data":"7dfc36a1ce63eb795456f6e09c6acd3a23420c315d8bf803de54f68bd04b932c"} Dec 05 11:30:24 crc kubenswrapper[4728]: I1205 11:30:24.303611 4728 generic.go:334] "Generic (PLEG): container finished" podID="6cd833b3-79f5-41f4-8d37-07b81c016e23" containerID="97494aee051f73b4559160ae835f8908accc5dbec90c6733ae3fe69b27cc8e73" exitCode=0 Dec 05 11:30:24 crc kubenswrapper[4728]: I1205 11:30:24.304151 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85ff748b95-kj9k8" event={"ID":"6cd833b3-79f5-41f4-8d37-07b81c016e23","Type":"ContainerDied","Data":"97494aee051f73b4559160ae835f8908accc5dbec90c6733ae3fe69b27cc8e73"} Dec 05 11:30:24 crc kubenswrapper[4728]: I1205 11:30:24.324744 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-6d6466fdb6-7fhbh" podStartSLOduration=2.324728715 podStartE2EDuration="2.324728715s" podCreationTimestamp="2025-12-05 11:30:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:30:24.323467692 +0000 UTC m=+1358.465590385" watchObservedRunningTime="2025-12-05 11:30:24.324728715 +0000 UTC m=+1358.466851408" Dec 05 11:30:24 crc kubenswrapper[4728]: I1205 11:30:24.430858 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-76476c596-dh9zg" Dec 05 11:30:24 crc kubenswrapper[4728]: I1205 11:30:24.684518 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-dtppr" Dec 05 11:30:24 crc kubenswrapper[4728]: I1205 11:30:24.874457 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6wz7k\" (UniqueName: \"kubernetes.io/projected/875f6746-18ef-483c-bbb4-80d7dbe4b1a1-kube-api-access-6wz7k\") pod \"875f6746-18ef-483c-bbb4-80d7dbe4b1a1\" (UID: \"875f6746-18ef-483c-bbb4-80d7dbe4b1a1\") " Dec 05 11:30:24 crc kubenswrapper[4728]: I1205 11:30:24.874806 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/875f6746-18ef-483c-bbb4-80d7dbe4b1a1-combined-ca-bundle\") pod \"875f6746-18ef-483c-bbb4-80d7dbe4b1a1\" (UID: \"875f6746-18ef-483c-bbb4-80d7dbe4b1a1\") " Dec 05 11:30:24 crc kubenswrapper[4728]: I1205 11:30:24.874885 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/875f6746-18ef-483c-bbb4-80d7dbe4b1a1-scripts\") pod \"875f6746-18ef-483c-bbb4-80d7dbe4b1a1\" (UID: \"875f6746-18ef-483c-bbb4-80d7dbe4b1a1\") " Dec 05 11:30:24 crc kubenswrapper[4728]: I1205 11:30:24.874933 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/875f6746-18ef-483c-bbb4-80d7dbe4b1a1-db-sync-config-data\") pod \"875f6746-18ef-483c-bbb4-80d7dbe4b1a1\" (UID: \"875f6746-18ef-483c-bbb4-80d7dbe4b1a1\") " Dec 05 11:30:24 crc kubenswrapper[4728]: I1205 11:30:24.874975 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/875f6746-18ef-483c-bbb4-80d7dbe4b1a1-etc-machine-id\") pod \"875f6746-18ef-483c-bbb4-80d7dbe4b1a1\" (UID: \"875f6746-18ef-483c-bbb4-80d7dbe4b1a1\") " Dec 05 11:30:24 crc kubenswrapper[4728]: I1205 11:30:24.875031 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/875f6746-18ef-483c-bbb4-80d7dbe4b1a1-config-data\") pod \"875f6746-18ef-483c-bbb4-80d7dbe4b1a1\" (UID: \"875f6746-18ef-483c-bbb4-80d7dbe4b1a1\") " Dec 05 11:30:24 crc kubenswrapper[4728]: I1205 11:30:24.876200 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/875f6746-18ef-483c-bbb4-80d7dbe4b1a1-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "875f6746-18ef-483c-bbb4-80d7dbe4b1a1" (UID: "875f6746-18ef-483c-bbb4-80d7dbe4b1a1"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:30:24 crc kubenswrapper[4728]: I1205 11:30:24.884950 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/875f6746-18ef-483c-bbb4-80d7dbe4b1a1-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "875f6746-18ef-483c-bbb4-80d7dbe4b1a1" (UID: "875f6746-18ef-483c-bbb4-80d7dbe4b1a1"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:30:24 crc kubenswrapper[4728]: I1205 11:30:24.887281 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/875f6746-18ef-483c-bbb4-80d7dbe4b1a1-scripts" (OuterVolumeSpecName: "scripts") pod "875f6746-18ef-483c-bbb4-80d7dbe4b1a1" (UID: "875f6746-18ef-483c-bbb4-80d7dbe4b1a1"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:30:24 crc kubenswrapper[4728]: I1205 11:30:24.888032 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/875f6746-18ef-483c-bbb4-80d7dbe4b1a1-kube-api-access-6wz7k" (OuterVolumeSpecName: "kube-api-access-6wz7k") pod "875f6746-18ef-483c-bbb4-80d7dbe4b1a1" (UID: "875f6746-18ef-483c-bbb4-80d7dbe4b1a1"). InnerVolumeSpecName "kube-api-access-6wz7k". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:30:24 crc kubenswrapper[4728]: I1205 11:30:24.918049 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/875f6746-18ef-483c-bbb4-80d7dbe4b1a1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "875f6746-18ef-483c-bbb4-80d7dbe4b1a1" (UID: "875f6746-18ef-483c-bbb4-80d7dbe4b1a1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:30:24 crc kubenswrapper[4728]: I1205 11:30:24.940215 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/875f6746-18ef-483c-bbb4-80d7dbe4b1a1-config-data" (OuterVolumeSpecName: "config-data") pod "875f6746-18ef-483c-bbb4-80d7dbe4b1a1" (UID: "875f6746-18ef-483c-bbb4-80d7dbe4b1a1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:30:24 crc kubenswrapper[4728]: I1205 11:30:24.977953 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6wz7k\" (UniqueName: \"kubernetes.io/projected/875f6746-18ef-483c-bbb4-80d7dbe4b1a1-kube-api-access-6wz7k\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:24 crc kubenswrapper[4728]: I1205 11:30:24.978347 4728 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/875f6746-18ef-483c-bbb4-80d7dbe4b1a1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:24 crc kubenswrapper[4728]: I1205 11:30:24.978409 4728 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/875f6746-18ef-483c-bbb4-80d7dbe4b1a1-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:24 crc kubenswrapper[4728]: I1205 11:30:24.978483 4728 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/875f6746-18ef-483c-bbb4-80d7dbe4b1a1-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:24 crc kubenswrapper[4728]: I1205 11:30:24.978535 4728 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/875f6746-18ef-483c-bbb4-80d7dbe4b1a1-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:24 crc kubenswrapper[4728]: I1205 11:30:24.978626 4728 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/875f6746-18ef-483c-bbb4-80d7dbe4b1a1-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.336458 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-dtppr" event={"ID":"875f6746-18ef-483c-bbb4-80d7dbe4b1a1","Type":"ContainerDied","Data":"7a2a979f8db4594fe9e1ee92c5f841dbb1c6198152fa77f8d12a79d75aa9cd5c"} Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.336504 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7a2a979f8db4594fe9e1ee92c5f841dbb1c6198152fa77f8d12a79d75aa9cd5c" Dec 05 11:30:25 crc 
kubenswrapper[4728]: I1205 11:30:25.336556 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-dtppr" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.346420 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85ff748b95-kj9k8" event={"ID":"6cd833b3-79f5-41f4-8d37-07b81c016e23","Type":"ContainerStarted","Data":"6b729350de6fe3793305ea2f19e4f2b8e4a63f24e957ddc940b779b2d6841118"} Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.379108 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-798767f9fd-kfrfz"] Dec 05 11:30:25 crc kubenswrapper[4728]: E1205 11:30:25.379751 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="875f6746-18ef-483c-bbb4-80d7dbe4b1a1" containerName="cinder-db-sync" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.379768 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="875f6746-18ef-483c-bbb4-80d7dbe4b1a1" containerName="cinder-db-sync" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.379972 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="875f6746-18ef-483c-bbb4-80d7dbe4b1a1" containerName="cinder-db-sync" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.380913 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-798767f9fd-kfrfz" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.385917 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-798767f9fd-kfrfz"] Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.387154 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-85ff748b95-kj9k8" podStartSLOduration=3.387138105 podStartE2EDuration="3.387138105s" podCreationTimestamp="2025-12-05 11:30:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:30:25.36167286 +0000 UTC m=+1359.503795563" watchObservedRunningTime="2025-12-05 11:30:25.387138105 +0000 UTC m=+1359.529260798" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.395476 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.395578 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.488093 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/24fcf86b-13a3-46c0-bea6-37ef4da29b48-combined-ca-bundle\") pod \"barbican-api-798767f9fd-kfrfz\" (UID: \"24fcf86b-13a3-46c0-bea6-37ef4da29b48\") " pod="openstack/barbican-api-798767f9fd-kfrfz" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.488201 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/24fcf86b-13a3-46c0-bea6-37ef4da29b48-logs\") pod \"barbican-api-798767f9fd-kfrfz\" (UID: \"24fcf86b-13a3-46c0-bea6-37ef4da29b48\") " pod="openstack/barbican-api-798767f9fd-kfrfz" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.488247 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/24fcf86b-13a3-46c0-bea6-37ef4da29b48-public-tls-certs\") pod \"barbican-api-798767f9fd-kfrfz\" (UID: \"24fcf86b-13a3-46c0-bea6-37ef4da29b48\") " pod="openstack/barbican-api-798767f9fd-kfrfz" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.488297 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/24fcf86b-13a3-46c0-bea6-37ef4da29b48-config-data\") pod \"barbican-api-798767f9fd-kfrfz\" (UID: \"24fcf86b-13a3-46c0-bea6-37ef4da29b48\") " pod="openstack/barbican-api-798767f9fd-kfrfz" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.488336 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/24fcf86b-13a3-46c0-bea6-37ef4da29b48-config-data-custom\") pod \"barbican-api-798767f9fd-kfrfz\" (UID: \"24fcf86b-13a3-46c0-bea6-37ef4da29b48\") " pod="openstack/barbican-api-798767f9fd-kfrfz" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.488369 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/24fcf86b-13a3-46c0-bea6-37ef4da29b48-internal-tls-certs\") pod \"barbican-api-798767f9fd-kfrfz\" (UID: \"24fcf86b-13a3-46c0-bea6-37ef4da29b48\") " pod="openstack/barbican-api-798767f9fd-kfrfz" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.488391 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2rdvj\" (UniqueName: \"kubernetes.io/projected/24fcf86b-13a3-46c0-bea6-37ef4da29b48-kube-api-access-2rdvj\") pod \"barbican-api-798767f9fd-kfrfz\" (UID: \"24fcf86b-13a3-46c0-bea6-37ef4da29b48\") " pod="openstack/barbican-api-798767f9fd-kfrfz" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.544206 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.547480 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.553131 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.553253 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-5pzsc" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.553378 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.553753 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.555785 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.590376 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/24fcf86b-13a3-46c0-bea6-37ef4da29b48-combined-ca-bundle\") pod \"barbican-api-798767f9fd-kfrfz\" (UID: \"24fcf86b-13a3-46c0-bea6-37ef4da29b48\") " pod="openstack/barbican-api-798767f9fd-kfrfz" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.590488 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/24fcf86b-13a3-46c0-bea6-37ef4da29b48-logs\") pod \"barbican-api-798767f9fd-kfrfz\" (UID: \"24fcf86b-13a3-46c0-bea6-37ef4da29b48\") " pod="openstack/barbican-api-798767f9fd-kfrfz" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.590520 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/24fcf86b-13a3-46c0-bea6-37ef4da29b48-public-tls-certs\") pod \"barbican-api-798767f9fd-kfrfz\" (UID: \"24fcf86b-13a3-46c0-bea6-37ef4da29b48\") " pod="openstack/barbican-api-798767f9fd-kfrfz" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.590547 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/24fcf86b-13a3-46c0-bea6-37ef4da29b48-config-data\") pod \"barbican-api-798767f9fd-kfrfz\" (UID: \"24fcf86b-13a3-46c0-bea6-37ef4da29b48\") " pod="openstack/barbican-api-798767f9fd-kfrfz" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.590583 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/24fcf86b-13a3-46c0-bea6-37ef4da29b48-config-data-custom\") pod \"barbican-api-798767f9fd-kfrfz\" (UID: \"24fcf86b-13a3-46c0-bea6-37ef4da29b48\") " pod="openstack/barbican-api-798767f9fd-kfrfz" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.590599 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/24fcf86b-13a3-46c0-bea6-37ef4da29b48-internal-tls-certs\") pod \"barbican-api-798767f9fd-kfrfz\" (UID: \"24fcf86b-13a3-46c0-bea6-37ef4da29b48\") " pod="openstack/barbican-api-798767f9fd-kfrfz" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.590618 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2rdvj\" (UniqueName: \"kubernetes.io/projected/24fcf86b-13a3-46c0-bea6-37ef4da29b48-kube-api-access-2rdvj\") pod \"barbican-api-798767f9fd-kfrfz\" (UID: 
\"24fcf86b-13a3-46c0-bea6-37ef4da29b48\") " pod="openstack/barbican-api-798767f9fd-kfrfz" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.590874 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/24fcf86b-13a3-46c0-bea6-37ef4da29b48-logs\") pod \"barbican-api-798767f9fd-kfrfz\" (UID: \"24fcf86b-13a3-46c0-bea6-37ef4da29b48\") " pod="openstack/barbican-api-798767f9fd-kfrfz" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.605728 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/24fcf86b-13a3-46c0-bea6-37ef4da29b48-config-data\") pod \"barbican-api-798767f9fd-kfrfz\" (UID: \"24fcf86b-13a3-46c0-bea6-37ef4da29b48\") " pod="openstack/barbican-api-798767f9fd-kfrfz" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.608013 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/24fcf86b-13a3-46c0-bea6-37ef4da29b48-internal-tls-certs\") pod \"barbican-api-798767f9fd-kfrfz\" (UID: \"24fcf86b-13a3-46c0-bea6-37ef4da29b48\") " pod="openstack/barbican-api-798767f9fd-kfrfz" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.615922 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/24fcf86b-13a3-46c0-bea6-37ef4da29b48-config-data-custom\") pod \"barbican-api-798767f9fd-kfrfz\" (UID: \"24fcf86b-13a3-46c0-bea6-37ef4da29b48\") " pod="openstack/barbican-api-798767f9fd-kfrfz" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.617352 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/24fcf86b-13a3-46c0-bea6-37ef4da29b48-public-tls-certs\") pod \"barbican-api-798767f9fd-kfrfz\" (UID: \"24fcf86b-13a3-46c0-bea6-37ef4da29b48\") " pod="openstack/barbican-api-798767f9fd-kfrfz" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.618462 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2rdvj\" (UniqueName: \"kubernetes.io/projected/24fcf86b-13a3-46c0-bea6-37ef4da29b48-kube-api-access-2rdvj\") pod \"barbican-api-798767f9fd-kfrfz\" (UID: \"24fcf86b-13a3-46c0-bea6-37ef4da29b48\") " pod="openstack/barbican-api-798767f9fd-kfrfz" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.622376 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/24fcf86b-13a3-46c0-bea6-37ef4da29b48-combined-ca-bundle\") pod \"barbican-api-798767f9fd-kfrfz\" (UID: \"24fcf86b-13a3-46c0-bea6-37ef4da29b48\") " pod="openstack/barbican-api-798767f9fd-kfrfz" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.691731 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44b0cb2c-149f-4a9d-a494-cb9542391b4f-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"44b0cb2c-149f-4a9d-a494-cb9542391b4f\") " pod="openstack/cinder-scheduler-0" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.691782 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mhvgr\" (UniqueName: \"kubernetes.io/projected/44b0cb2c-149f-4a9d-a494-cb9542391b4f-kube-api-access-mhvgr\") pod \"cinder-scheduler-0\" (UID: \"44b0cb2c-149f-4a9d-a494-cb9542391b4f\") " 
pod="openstack/cinder-scheduler-0" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.691822 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/44b0cb2c-149f-4a9d-a494-cb9542391b4f-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"44b0cb2c-149f-4a9d-a494-cb9542391b4f\") " pod="openstack/cinder-scheduler-0" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.691903 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/44b0cb2c-149f-4a9d-a494-cb9542391b4f-scripts\") pod \"cinder-scheduler-0\" (UID: \"44b0cb2c-149f-4a9d-a494-cb9542391b4f\") " pod="openstack/cinder-scheduler-0" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.691918 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/44b0cb2c-149f-4a9d-a494-cb9542391b4f-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"44b0cb2c-149f-4a9d-a494-cb9542391b4f\") " pod="openstack/cinder-scheduler-0" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.692036 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44b0cb2c-149f-4a9d-a494-cb9542391b4f-config-data\") pod \"cinder-scheduler-0\" (UID: \"44b0cb2c-149f-4a9d-a494-cb9542391b4f\") " pod="openstack/cinder-scheduler-0" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.695605 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-kj9k8"] Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.710254 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-798767f9fd-kfrfz" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.721151 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-xxm6x"] Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.722726 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c9776ccc5-xxm6x" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.738996 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-xxm6x"] Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.794355 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-volume-volume1-0"] Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.794746 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/296a9db8-738c-4a95-87ed-3037d5b6ddf5-dns-svc\") pod \"dnsmasq-dns-5c9776ccc5-xxm6x\" (UID: \"296a9db8-738c-4a95-87ed-3037d5b6ddf5\") " pod="openstack/dnsmasq-dns-5c9776ccc5-xxm6x" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.794809 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/44b0cb2c-149f-4a9d-a494-cb9542391b4f-scripts\") pod \"cinder-scheduler-0\" (UID: \"44b0cb2c-149f-4a9d-a494-cb9542391b4f\") " pod="openstack/cinder-scheduler-0" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.794828 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/44b0cb2c-149f-4a9d-a494-cb9542391b4f-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"44b0cb2c-149f-4a9d-a494-cb9542391b4f\") " pod="openstack/cinder-scheduler-0" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.794845 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44b0cb2c-149f-4a9d-a494-cb9542391b4f-config-data\") pod \"cinder-scheduler-0\" (UID: \"44b0cb2c-149f-4a9d-a494-cb9542391b4f\") " pod="openstack/cinder-scheduler-0" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.794877 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/296a9db8-738c-4a95-87ed-3037d5b6ddf5-ovsdbserver-nb\") pod \"dnsmasq-dns-5c9776ccc5-xxm6x\" (UID: \"296a9db8-738c-4a95-87ed-3037d5b6ddf5\") " pod="openstack/dnsmasq-dns-5c9776ccc5-xxm6x" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.794899 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/296a9db8-738c-4a95-87ed-3037d5b6ddf5-config\") pod \"dnsmasq-dns-5c9776ccc5-xxm6x\" (UID: \"296a9db8-738c-4a95-87ed-3037d5b6ddf5\") " pod="openstack/dnsmasq-dns-5c9776ccc5-xxm6x" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.794923 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/296a9db8-738c-4a95-87ed-3037d5b6ddf5-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9776ccc5-xxm6x\" (UID: \"296a9db8-738c-4a95-87ed-3037d5b6ddf5\") " pod="openstack/dnsmasq-dns-5c9776ccc5-xxm6x" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.794958 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44b0cb2c-149f-4a9d-a494-cb9542391b4f-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"44b0cb2c-149f-4a9d-a494-cb9542391b4f\") " pod="openstack/cinder-scheduler-0" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.794986 4728 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-mhvgr\" (UniqueName: \"kubernetes.io/projected/44b0cb2c-149f-4a9d-a494-cb9542391b4f-kube-api-access-mhvgr\") pod \"cinder-scheduler-0\" (UID: \"44b0cb2c-149f-4a9d-a494-cb9542391b4f\") " pod="openstack/cinder-scheduler-0" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.795003 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/296a9db8-738c-4a95-87ed-3037d5b6ddf5-dns-swift-storage-0\") pod \"dnsmasq-dns-5c9776ccc5-xxm6x\" (UID: \"296a9db8-738c-4a95-87ed-3037d5b6ddf5\") " pod="openstack/dnsmasq-dns-5c9776ccc5-xxm6x" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.795029 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/44b0cb2c-149f-4a9d-a494-cb9542391b4f-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"44b0cb2c-149f-4a9d-a494-cb9542391b4f\") " pod="openstack/cinder-scheduler-0" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.795081 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dnpl6\" (UniqueName: \"kubernetes.io/projected/296a9db8-738c-4a95-87ed-3037d5b6ddf5-kube-api-access-dnpl6\") pod \"dnsmasq-dns-5c9776ccc5-xxm6x\" (UID: \"296a9db8-738c-4a95-87ed-3037d5b6ddf5\") " pod="openstack/dnsmasq-dns-5c9776ccc5-xxm6x" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.795630 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/44b0cb2c-149f-4a9d-a494-cb9542391b4f-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"44b0cb2c-149f-4a9d-a494-cb9542391b4f\") " pod="openstack/cinder-scheduler-0" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.795783 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.800579 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-volume-volume1-config-data" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.800881 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/44b0cb2c-149f-4a9d-a494-cb9542391b4f-scripts\") pod \"cinder-scheduler-0\" (UID: \"44b0cb2c-149f-4a9d-a494-cb9542391b4f\") " pod="openstack/cinder-scheduler-0" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.804880 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44b0cb2c-149f-4a9d-a494-cb9542391b4f-config-data\") pod \"cinder-scheduler-0\" (UID: \"44b0cb2c-149f-4a9d-a494-cb9542391b4f\") " pod="openstack/cinder-scheduler-0" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.809476 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/44b0cb2c-149f-4a9d-a494-cb9542391b4f-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"44b0cb2c-149f-4a9d-a494-cb9542391b4f\") " pod="openstack/cinder-scheduler-0" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.811586 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44b0cb2c-149f-4a9d-a494-cb9542391b4f-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"44b0cb2c-149f-4a9d-a494-cb9542391b4f\") " pod="openstack/cinder-scheduler-0" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.821240 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mhvgr\" (UniqueName: \"kubernetes.io/projected/44b0cb2c-149f-4a9d-a494-cb9542391b4f-kube-api-access-mhvgr\") pod \"cinder-scheduler-0\" (UID: \"44b0cb2c-149f-4a9d-a494-cb9542391b4f\") " pod="openstack/cinder-scheduler-0" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.835753 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume1-0"] Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.860132 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-backup-0"] Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.861590 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-backup-0" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.863968 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.887268 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-backup-config-data" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.894444 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.896443 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dnpl6\" (UniqueName: \"kubernetes.io/projected/296a9db8-738c-4a95-87ed-3037d5b6ddf5-kube-api-access-dnpl6\") pod \"dnsmasq-dns-5c9776ccc5-xxm6x\" (UID: \"296a9db8-738c-4a95-87ed-3037d5b6ddf5\") " pod="openstack/dnsmasq-dns-5c9776ccc5-xxm6x" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.896514 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/296a9db8-738c-4a95-87ed-3037d5b6ddf5-dns-svc\") pod \"dnsmasq-dns-5c9776ccc5-xxm6x\" (UID: \"296a9db8-738c-4a95-87ed-3037d5b6ddf5\") " pod="openstack/dnsmasq-dns-5c9776ccc5-xxm6x" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.896561 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/296a9db8-738c-4a95-87ed-3037d5b6ddf5-ovsdbserver-nb\") pod \"dnsmasq-dns-5c9776ccc5-xxm6x\" (UID: \"296a9db8-738c-4a95-87ed-3037d5b6ddf5\") " pod="openstack/dnsmasq-dns-5c9776ccc5-xxm6x" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.896584 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/296a9db8-738c-4a95-87ed-3037d5b6ddf5-config\") pod \"dnsmasq-dns-5c9776ccc5-xxm6x\" (UID: \"296a9db8-738c-4a95-87ed-3037d5b6ddf5\") " pod="openstack/dnsmasq-dns-5c9776ccc5-xxm6x" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.896608 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/296a9db8-738c-4a95-87ed-3037d5b6ddf5-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9776ccc5-xxm6x\" (UID: \"296a9db8-738c-4a95-87ed-3037d5b6ddf5\") " pod="openstack/dnsmasq-dns-5c9776ccc5-xxm6x" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.896652 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/296a9db8-738c-4a95-87ed-3037d5b6ddf5-dns-swift-storage-0\") pod \"dnsmasq-dns-5c9776ccc5-xxm6x\" (UID: \"296a9db8-738c-4a95-87ed-3037d5b6ddf5\") " pod="openstack/dnsmasq-dns-5c9776ccc5-xxm6x" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.898558 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/296a9db8-738c-4a95-87ed-3037d5b6ddf5-dns-svc\") pod \"dnsmasq-dns-5c9776ccc5-xxm6x\" (UID: \"296a9db8-738c-4a95-87ed-3037d5b6ddf5\") " pod="openstack/dnsmasq-dns-5c9776ccc5-xxm6x" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.902770 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/296a9db8-738c-4a95-87ed-3037d5b6ddf5-ovsdbserver-nb\") pod \"dnsmasq-dns-5c9776ccc5-xxm6x\" (UID: \"296a9db8-738c-4a95-87ed-3037d5b6ddf5\") " pod="openstack/dnsmasq-dns-5c9776ccc5-xxm6x" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.903319 4728 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/296a9db8-738c-4a95-87ed-3037d5b6ddf5-config\") pod \"dnsmasq-dns-5c9776ccc5-xxm6x\" (UID: \"296a9db8-738c-4a95-87ed-3037d5b6ddf5\") " pod="openstack/dnsmasq-dns-5c9776ccc5-xxm6x" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.903832 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/296a9db8-738c-4a95-87ed-3037d5b6ddf5-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9776ccc5-xxm6x\" (UID: \"296a9db8-738c-4a95-87ed-3037d5b6ddf5\") " pod="openstack/dnsmasq-dns-5c9776ccc5-xxm6x" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.904506 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/296a9db8-738c-4a95-87ed-3037d5b6ddf5-dns-swift-storage-0\") pod \"dnsmasq-dns-5c9776ccc5-xxm6x\" (UID: \"296a9db8-738c-4a95-87ed-3037d5b6ddf5\") " pod="openstack/dnsmasq-dns-5c9776ccc5-xxm6x" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.940213 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.941731 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.944177 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dnpl6\" (UniqueName: \"kubernetes.io/projected/296a9db8-738c-4a95-87ed-3037d5b6ddf5-kube-api-access-dnpl6\") pod \"dnsmasq-dns-5c9776ccc5-xxm6x\" (UID: \"296a9db8-738c-4a95-87ed-3037d5b6ddf5\") " pod="openstack/dnsmasq-dns-5c9776ccc5-xxm6x" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.944221 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 05 11:30:25 crc kubenswrapper[4728]: I1205 11:30:25.949152 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.000311 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-run\") pod \"cinder-backup-0\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " pod="openstack/cinder-backup-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.000352 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3771ab3-bbe0-4af1-af70-01155203ec37-config-data\") pod \"cinder-api-0\" (UID: \"a3771ab3-bbe0-4af1-af70-01155203ec37\") " pod="openstack/cinder-api-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.000372 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-run\") pod \"cinder-volume-volume1-0\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.000390 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-lib-modules\") pod \"cinder-backup-0\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " pod="openstack/cinder-backup-0" Dec 05 
11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.000408 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " pod="openstack/cinder-backup-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.000429 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.000448 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/36b86337-512e-47a3-80ec-ea9c3b25c3f6-ceph\") pod \"cinder-backup-0\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " pod="openstack/cinder-backup-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.000480 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36b86337-512e-47a3-80ec-ea9c3b25c3f6-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " pod="openstack/cinder-backup-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.000495 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f719999-317e-4c1f-9d0d-53c9d1277e14-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.000519 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.000534 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a3771ab3-bbe0-4af1-af70-01155203ec37-etc-machine-id\") pod \"cinder-api-0\" (UID: \"a3771ab3-bbe0-4af1-af70-01155203ec37\") " pod="openstack/cinder-api-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.000552 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f719999-317e-4c1f-9d0d-53c9d1277e14-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.000569 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t4dm5\" (UniqueName: \"kubernetes.io/projected/36b86337-512e-47a3-80ec-ea9c3b25c3f6-kube-api-access-t4dm5\") pod \"cinder-backup-0\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " pod="openstack/cinder-backup-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.000587 4728 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3f719999-317e-4c1f-9d0d-53c9d1277e14-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.000608 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-dev\") pod \"cinder-volume-volume1-0\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.000710 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jqbgd\" (UniqueName: \"kubernetes.io/projected/3f719999-317e-4c1f-9d0d-53c9d1277e14-kube-api-access-jqbgd\") pod \"cinder-volume-volume1-0\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.000729 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a3771ab3-bbe0-4af1-af70-01155203ec37-config-data-custom\") pod \"cinder-api-0\" (UID: \"a3771ab3-bbe0-4af1-af70-01155203ec37\") " pod="openstack/cinder-api-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.000743 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " pod="openstack/cinder-backup-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.000765 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-etc-nvme\") pod \"cinder-backup-0\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " pod="openstack/cinder-backup-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.000781 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a3771ab3-bbe0-4af1-af70-01155203ec37-scripts\") pod \"cinder-api-0\" (UID: \"a3771ab3-bbe0-4af1-af70-01155203ec37\") " pod="openstack/cinder-api-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.000814 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.000830 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-dev\") pod \"cinder-backup-0\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " pod="openstack/cinder-backup-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.000853 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: 
\"kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " pod="openstack/cinder-backup-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.000870 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f719999-317e-4c1f-9d0d-53c9d1277e14-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.000885 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a3771ab3-bbe0-4af1-af70-01155203ec37-logs\") pod \"cinder-api-0\" (UID: \"a3771ab3-bbe0-4af1-af70-01155203ec37\") " pod="openstack/cinder-api-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.000904 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36b86337-512e-47a3-80ec-ea9c3b25c3f6-scripts\") pod \"cinder-backup-0\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " pod="openstack/cinder-backup-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.000922 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/36b86337-512e-47a3-80ec-ea9c3b25c3f6-config-data-custom\") pod \"cinder-backup-0\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " pod="openstack/cinder-backup-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.000942 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/3f719999-317e-4c1f-9d0d-53c9d1277e14-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.000959 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-sys\") pod \"cinder-volume-volume1-0\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.000990 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g4wnt\" (UniqueName: \"kubernetes.io/projected/a3771ab3-bbe0-4af1-af70-01155203ec37-kube-api-access-g4wnt\") pod \"cinder-api-0\" (UID: \"a3771ab3-bbe0-4af1-af70-01155203ec37\") " pod="openstack/cinder-api-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.001008 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.001026 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: 
\"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.001046 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-sys\") pod \"cinder-backup-0\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " pod="openstack/cinder-backup-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.001060 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.001076 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36b86337-512e-47a3-80ec-ea9c3b25c3f6-config-data\") pod \"cinder-backup-0\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " pod="openstack/cinder-backup-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.001090 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3771ab3-bbe0-4af1-af70-01155203ec37-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"a3771ab3-bbe0-4af1-af70-01155203ec37\") " pod="openstack/cinder-api-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.001114 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " pod="openstack/cinder-backup-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.001133 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " pod="openstack/cinder-backup-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.001155 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.052630 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c9776ccc5-xxm6x" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.102488 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " pod="openstack/cinder-backup-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.102527 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " pod="openstack/cinder-backup-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.102551 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.102570 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-run\") pod \"cinder-backup-0\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " pod="openstack/cinder-backup-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.102590 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3771ab3-bbe0-4af1-af70-01155203ec37-config-data\") pod \"cinder-api-0\" (UID: \"a3771ab3-bbe0-4af1-af70-01155203ec37\") " pod="openstack/cinder-api-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.102606 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-run\") pod \"cinder-volume-volume1-0\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.102624 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-lib-modules\") pod \"cinder-backup-0\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " pod="openstack/cinder-backup-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.102641 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " pod="openstack/cinder-backup-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.102662 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.102678 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/36b86337-512e-47a3-80ec-ea9c3b25c3f6-ceph\") 
pod \"cinder-backup-0\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " pod="openstack/cinder-backup-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.102703 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36b86337-512e-47a3-80ec-ea9c3b25c3f6-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " pod="openstack/cinder-backup-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.102721 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f719999-317e-4c1f-9d0d-53c9d1277e14-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.102726 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-run\") pod \"cinder-volume-volume1-0\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.102766 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.102743 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.103192 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a3771ab3-bbe0-4af1-af70-01155203ec37-etc-machine-id\") pod \"cinder-api-0\" (UID: \"a3771ab3-bbe0-4af1-af70-01155203ec37\") " pod="openstack/cinder-api-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.103212 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f719999-317e-4c1f-9d0d-53c9d1277e14-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.103229 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t4dm5\" (UniqueName: \"kubernetes.io/projected/36b86337-512e-47a3-80ec-ea9c3b25c3f6-kube-api-access-t4dm5\") pod \"cinder-backup-0\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " pod="openstack/cinder-backup-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.102642 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-run\") pod \"cinder-backup-0\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " pod="openstack/cinder-backup-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.103246 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/3f719999-317e-4c1f-9d0d-53c9d1277e14-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.102815 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-lib-modules\") pod \"cinder-backup-0\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " pod="openstack/cinder-backup-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.103299 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a3771ab3-bbe0-4af1-af70-01155203ec37-etc-machine-id\") pod \"cinder-api-0\" (UID: \"a3771ab3-bbe0-4af1-af70-01155203ec37\") " pod="openstack/cinder-api-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.103360 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-dev\") pod \"cinder-volume-volume1-0\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.103416 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " pod="openstack/cinder-backup-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.103437 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jqbgd\" (UniqueName: \"kubernetes.io/projected/3f719999-317e-4c1f-9d0d-53c9d1277e14-kube-api-access-jqbgd\") pod \"cinder-volume-volume1-0\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.103455 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a3771ab3-bbe0-4af1-af70-01155203ec37-config-data-custom\") pod \"cinder-api-0\" (UID: \"a3771ab3-bbe0-4af1-af70-01155203ec37\") " pod="openstack/cinder-api-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.103513 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-etc-nvme\") pod \"cinder-backup-0\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " pod="openstack/cinder-backup-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.103549 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a3771ab3-bbe0-4af1-af70-01155203ec37-scripts\") pod \"cinder-api-0\" (UID: \"a3771ab3-bbe0-4af1-af70-01155203ec37\") " pod="openstack/cinder-api-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.103592 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.103619 4728 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-dev\") pod \"cinder-backup-0\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " pod="openstack/cinder-backup-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.103659 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " pod="openstack/cinder-backup-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.103678 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f719999-317e-4c1f-9d0d-53c9d1277e14-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.103700 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a3771ab3-bbe0-4af1-af70-01155203ec37-logs\") pod \"cinder-api-0\" (UID: \"a3771ab3-bbe0-4af1-af70-01155203ec37\") " pod="openstack/cinder-api-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.103723 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36b86337-512e-47a3-80ec-ea9c3b25c3f6-scripts\") pod \"cinder-backup-0\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " pod="openstack/cinder-backup-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.103752 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/36b86337-512e-47a3-80ec-ea9c3b25c3f6-config-data-custom\") pod \"cinder-backup-0\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " pod="openstack/cinder-backup-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.103803 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/3f719999-317e-4c1f-9d0d-53c9d1277e14-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.103830 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-sys\") pod \"cinder-volume-volume1-0\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.103861 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g4wnt\" (UniqueName: \"kubernetes.io/projected/a3771ab3-bbe0-4af1-af70-01155203ec37-kube-api-access-g4wnt\") pod \"cinder-api-0\" (UID: \"a3771ab3-bbe0-4af1-af70-01155203ec37\") " pod="openstack/cinder-api-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.103897 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.103924 4728 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.103956 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-sys\") pod \"cinder-backup-0\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " pod="openstack/cinder-backup-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.103981 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.104004 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36b86337-512e-47a3-80ec-ea9c3b25c3f6-config-data\") pod \"cinder-backup-0\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " pod="openstack/cinder-backup-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.104019 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3771ab3-bbe0-4af1-af70-01155203ec37-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"a3771ab3-bbe0-4af1-af70-01155203ec37\") " pod="openstack/cinder-api-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.104233 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " pod="openstack/cinder-backup-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.104234 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.104306 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " pod="openstack/cinder-backup-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.104425 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a3771ab3-bbe0-4af1-af70-01155203ec37-logs\") pod \"cinder-api-0\" (UID: \"a3771ab3-bbe0-4af1-af70-01155203ec37\") " pod="openstack/cinder-api-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.104475 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-dev\") pod \"cinder-volume-volume1-0\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:26 crc 
kubenswrapper[4728]: I1205 11:30:26.102815 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " pod="openstack/cinder-backup-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.104502 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " pod="openstack/cinder-backup-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.104520 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-sys\") pod \"cinder-backup-0\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " pod="openstack/cinder-backup-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.104553 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " pod="openstack/cinder-backup-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.104559 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-dev\") pod \"cinder-backup-0\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " pod="openstack/cinder-backup-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.102863 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.104596 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.104606 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.104530 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.104631 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:26 
crc kubenswrapper[4728]: I1205 11:30:26.107971 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36b86337-512e-47a3-80ec-ea9c3b25c3f6-scripts\") pod \"cinder-backup-0\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " pod="openstack/cinder-backup-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.108078 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3771ab3-bbe0-4af1-af70-01155203ec37-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"a3771ab3-bbe0-4af1-af70-01155203ec37\") " pod="openstack/cinder-api-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.109207 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f719999-317e-4c1f-9d0d-53c9d1277e14-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.109322 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-sys\") pod \"cinder-volume-volume1-0\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.109360 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f719999-317e-4c1f-9d0d-53c9d1277e14-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.109364 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-etc-nvme\") pod \"cinder-backup-0\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " pod="openstack/cinder-backup-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.111558 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36b86337-512e-47a3-80ec-ea9c3b25c3f6-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " pod="openstack/cinder-backup-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.112826 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/36b86337-512e-47a3-80ec-ea9c3b25c3f6-config-data-custom\") pod \"cinder-backup-0\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " pod="openstack/cinder-backup-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.114931 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36b86337-512e-47a3-80ec-ea9c3b25c3f6-config-data\") pod \"cinder-backup-0\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " pod="openstack/cinder-backup-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.121243 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/3f719999-317e-4c1f-9d0d-53c9d1277e14-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:26 crc 
kubenswrapper[4728]: I1205 11:30:26.123249 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3771ab3-bbe0-4af1-af70-01155203ec37-config-data\") pod \"cinder-api-0\" (UID: \"a3771ab3-bbe0-4af1-af70-01155203ec37\") " pod="openstack/cinder-api-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.124808 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/36b86337-512e-47a3-80ec-ea9c3b25c3f6-ceph\") pod \"cinder-backup-0\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " pod="openstack/cinder-backup-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.125307 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3f719999-317e-4c1f-9d0d-53c9d1277e14-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.126123 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t4dm5\" (UniqueName: \"kubernetes.io/projected/36b86337-512e-47a3-80ec-ea9c3b25c3f6-kube-api-access-t4dm5\") pod \"cinder-backup-0\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " pod="openstack/cinder-backup-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.127439 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jqbgd\" (UniqueName: \"kubernetes.io/projected/3f719999-317e-4c1f-9d0d-53c9d1277e14-kube-api-access-jqbgd\") pod \"cinder-volume-volume1-0\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.127519 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a3771ab3-bbe0-4af1-af70-01155203ec37-scripts\") pod \"cinder-api-0\" (UID: \"a3771ab3-bbe0-4af1-af70-01155203ec37\") " pod="openstack/cinder-api-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.128263 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f719999-317e-4c1f-9d0d-53c9d1277e14-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.128346 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g4wnt\" (UniqueName: \"kubernetes.io/projected/a3771ab3-bbe0-4af1-af70-01155203ec37-kube-api-access-g4wnt\") pod \"cinder-api-0\" (UID: \"a3771ab3-bbe0-4af1-af70-01155203ec37\") " pod="openstack/cinder-api-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.130180 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a3771ab3-bbe0-4af1-af70-01155203ec37-config-data-custom\") pod \"cinder-api-0\" (UID: \"a3771ab3-bbe0-4af1-af70-01155203ec37\") " pod="openstack/cinder-api-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.141999 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.300701 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-backup-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.309422 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.353161 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-85ff748b95-kj9k8" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.476234 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-5cf4cb67d5-pxwtj" Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.550677 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-76476c596-dh9zg"] Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.551199 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-76476c596-dh9zg" podUID="b2657bde-2b78-49b2-bb33-a085f5a42024" containerName="neutron-api" containerID="cri-o://a424612358c0338ec1134ced5d16490f2c4b85530adaef3d897eb3a80f1f2e77" gracePeriod=30 Dec 05 11:30:26 crc kubenswrapper[4728]: I1205 11:30:26.551674 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-76476c596-dh9zg" podUID="b2657bde-2b78-49b2-bb33-a085f5a42024" containerName="neutron-httpd" containerID="cri-o://38d755236e4efaf1855f3e775b70540d8fa5c965fa203ac9cdcacaa126da088d" gracePeriod=30 Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.053959 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.155411 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dbac4a04-13e4-43e5-a221-3faec9e9fac7-combined-ca-bundle\") pod \"dbac4a04-13e4-43e5-a221-3faec9e9fac7\" (UID: \"dbac4a04-13e4-43e5-a221-3faec9e9fac7\") " Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.155960 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dbac4a04-13e4-43e5-a221-3faec9e9fac7-sg-core-conf-yaml\") pod \"dbac4a04-13e4-43e5-a221-3faec9e9fac7\" (UID: \"dbac4a04-13e4-43e5-a221-3faec9e9fac7\") " Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.155991 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dbac4a04-13e4-43e5-a221-3faec9e9fac7-config-data\") pod \"dbac4a04-13e4-43e5-a221-3faec9e9fac7\" (UID: \"dbac4a04-13e4-43e5-a221-3faec9e9fac7\") " Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.156038 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dbac4a04-13e4-43e5-a221-3faec9e9fac7-log-httpd\") pod \"dbac4a04-13e4-43e5-a221-3faec9e9fac7\" (UID: \"dbac4a04-13e4-43e5-a221-3faec9e9fac7\") " Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.156084 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dbac4a04-13e4-43e5-a221-3faec9e9fac7-scripts\") pod \"dbac4a04-13e4-43e5-a221-3faec9e9fac7\" (UID: \"dbac4a04-13e4-43e5-a221-3faec9e9fac7\") " Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.156111 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v5nvc\" (UniqueName: 
\"kubernetes.io/projected/dbac4a04-13e4-43e5-a221-3faec9e9fac7-kube-api-access-v5nvc\") pod \"dbac4a04-13e4-43e5-a221-3faec9e9fac7\" (UID: \"dbac4a04-13e4-43e5-a221-3faec9e9fac7\") " Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.156230 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dbac4a04-13e4-43e5-a221-3faec9e9fac7-run-httpd\") pod \"dbac4a04-13e4-43e5-a221-3faec9e9fac7\" (UID: \"dbac4a04-13e4-43e5-a221-3faec9e9fac7\") " Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.159323 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dbac4a04-13e4-43e5-a221-3faec9e9fac7-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "dbac4a04-13e4-43e5-a221-3faec9e9fac7" (UID: "dbac4a04-13e4-43e5-a221-3faec9e9fac7"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.163280 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dbac4a04-13e4-43e5-a221-3faec9e9fac7-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "dbac4a04-13e4-43e5-a221-3faec9e9fac7" (UID: "dbac4a04-13e4-43e5-a221-3faec9e9fac7"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.183070 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dbac4a04-13e4-43e5-a221-3faec9e9fac7-kube-api-access-v5nvc" (OuterVolumeSpecName: "kube-api-access-v5nvc") pod "dbac4a04-13e4-43e5-a221-3faec9e9fac7" (UID: "dbac4a04-13e4-43e5-a221-3faec9e9fac7"). InnerVolumeSpecName "kube-api-access-v5nvc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.212902 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dbac4a04-13e4-43e5-a221-3faec9e9fac7-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "dbac4a04-13e4-43e5-a221-3faec9e9fac7" (UID: "dbac4a04-13e4-43e5-a221-3faec9e9fac7"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.216953 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dbac4a04-13e4-43e5-a221-3faec9e9fac7-scripts" (OuterVolumeSpecName: "scripts") pod "dbac4a04-13e4-43e5-a221-3faec9e9fac7" (UID: "dbac4a04-13e4-43e5-a221-3faec9e9fac7"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.283825 4728 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dbac4a04-13e4-43e5-a221-3faec9e9fac7-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.293483 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v5nvc\" (UniqueName: \"kubernetes.io/projected/dbac4a04-13e4-43e5-a221-3faec9e9fac7-kube-api-access-v5nvc\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.293569 4728 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dbac4a04-13e4-43e5-a221-3faec9e9fac7-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.293644 4728 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dbac4a04-13e4-43e5-a221-3faec9e9fac7-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.293713 4728 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dbac4a04-13e4-43e5-a221-3faec9e9fac7-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.353941 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dbac4a04-13e4-43e5-a221-3faec9e9fac7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dbac4a04-13e4-43e5-a221-3faec9e9fac7" (UID: "dbac4a04-13e4-43e5-a221-3faec9e9fac7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.398853 4728 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dbac4a04-13e4-43e5-a221-3faec9e9fac7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.433880 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-xxm6x"] Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.461654 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.486943 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dbac4a04-13e4-43e5-a221-3faec9e9fac7-config-data" (OuterVolumeSpecName: "config-data") pod "dbac4a04-13e4-43e5-a221-3faec9e9fac7" (UID: "dbac4a04-13e4-43e5-a221-3faec9e9fac7"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.491109 4728 generic.go:334] "Generic (PLEG): container finished" podID="b2657bde-2b78-49b2-bb33-a085f5a42024" containerID="38d755236e4efaf1855f3e775b70540d8fa5c965fa203ac9cdcacaa126da088d" exitCode=0 Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.491209 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-76476c596-dh9zg" event={"ID":"b2657bde-2b78-49b2-bb33-a085f5a42024","Type":"ContainerDied","Data":"38d755236e4efaf1855f3e775b70540d8fa5c965fa203ac9cdcacaa126da088d"} Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.506857 4728 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dbac4a04-13e4-43e5-a221-3faec9e9fac7-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.521091 4728 generic.go:334] "Generic (PLEG): container finished" podID="dbac4a04-13e4-43e5-a221-3faec9e9fac7" containerID="900224b2993b650f331db1f1a449c72f71c4d12c706517d76a9bd5bdbc624302" exitCode=0 Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.521192 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dbac4a04-13e4-43e5-a221-3faec9e9fac7","Type":"ContainerDied","Data":"900224b2993b650f331db1f1a449c72f71c4d12c706517d76a9bd5bdbc624302"} Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.521225 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dbac4a04-13e4-43e5-a221-3faec9e9fac7","Type":"ContainerDied","Data":"3d7c688c4097698417f3ffe9cdd94dc9c7e500c58100bb28e5443023bdf0f232"} Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.521247 4728 scope.go:117] "RemoveContainer" containerID="e02df1173130b328a095083b29f0fa2593844521e5456035d1c5fea3103e91ec" Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.521411 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.526558 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-7f87fccb55-qhksr" event={"ID":"2b11e743-92a0-4601-8cdf-935c3cc54a55","Type":"ContainerStarted","Data":"8e6e50f6a649d183dd658c2b20368a262796813549fd58925d40fc87cbe099ef"}
Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.530199 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-76974b5d9d-khzwj" event={"ID":"37ac3cc7-ebdb-4ba9-97b6-c62b482b49c2","Type":"ContainerStarted","Data":"0d7743c9da88b12c6fd4bb0429caa24f258c825edc0dca752576a8a4577a3807"}
Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.530443 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-85ff748b95-kj9k8" podUID="6cd833b3-79f5-41f4-8d37-07b81c016e23" containerName="dnsmasq-dns" containerID="cri-o://6b729350de6fe3793305ea2f19e4f2b8e4a63f24e957ddc940b779b2d6841118" gracePeriod=10
Dec 05 11:30:27 crc kubenswrapper[4728]: W1205 11:30:27.538970 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod44b0cb2c_149f_4a9d_a494_cb9542391b4f.slice/crio-5b78811fad174433d7565eb792615c95d0daa3bce86bb98e959b2fdcee9a64e2 WatchSource:0}: Error finding container 5b78811fad174433d7565eb792615c95d0daa3bce86bb98e959b2fdcee9a64e2: Status 404 returned error can't find the container with id 5b78811fad174433d7565eb792615c95d0daa3bce86bb98e959b2fdcee9a64e2
Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.559513 4728 scope.go:117] "RemoveContainer" containerID="ca5f63c44cd9390148f011388c22d0f4f7300e9187fe267780b4aa9909991f53"
Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.599156 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"]
Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.624034 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.641832 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.649034 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Dec 05 11:30:27 crc kubenswrapper[4728]: E1205 11:30:27.649452 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbac4a04-13e4-43e5-a221-3faec9e9fac7" containerName="ceilometer-central-agent"
Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.649469 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbac4a04-13e4-43e5-a221-3faec9e9fac7" containerName="ceilometer-central-agent"
Dec 05 11:30:27 crc kubenswrapper[4728]: E1205 11:30:27.649485 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbac4a04-13e4-43e5-a221-3faec9e9fac7" containerName="proxy-httpd"
Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.649492 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbac4a04-13e4-43e5-a221-3faec9e9fac7" containerName="proxy-httpd"
Dec 05 11:30:27 crc kubenswrapper[4728]: E1205 11:30:27.649532 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbac4a04-13e4-43e5-a221-3faec9e9fac7" containerName="sg-core"
Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.649539 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbac4a04-13e4-43e5-a221-3faec9e9fac7" containerName="sg-core"
Dec 05 11:30:27 crc kubenswrapper[4728]: E1205 11:30:27.649547 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbac4a04-13e4-43e5-a221-3faec9e9fac7" containerName="ceilometer-notification-agent"
Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.649552 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbac4a04-13e4-43e5-a221-3faec9e9fac7" containerName="ceilometer-notification-agent"
Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.649735 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="dbac4a04-13e4-43e5-a221-3faec9e9fac7" containerName="proxy-httpd"
Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.649750 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="dbac4a04-13e4-43e5-a221-3faec9e9fac7" containerName="sg-core"
Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.649765 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="dbac4a04-13e4-43e5-a221-3faec9e9fac7" containerName="ceilometer-notification-agent"
Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.649776 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="dbac4a04-13e4-43e5-a221-3faec9e9fac7" containerName="ceilometer-central-agent"
Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.651882 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.659978 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.661734 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.662826 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.677454 4728 scope.go:117] "RemoveContainer" containerID="900224b2993b650f331db1f1a449c72f71c4d12c706517d76a9bd5bdbc624302"
Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.704296 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-798767f9fd-kfrfz"]
Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.718615 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f-config-data\") pod \"ceilometer-0\" (UID: \"e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f\") " pod="openstack/ceilometer-0"
Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.718876 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f-log-httpd\") pod \"ceilometer-0\" (UID: \"e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f\") " pod="openstack/ceilometer-0"
Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.718942 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f-run-httpd\") pod \"ceilometer-0\" (UID: \"e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f\") " pod="openstack/ceilometer-0"
Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.718966 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2gprw\" (UniqueName: \"kubernetes.io/projected/e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f-kube-api-access-2gprw\") pod \"ceilometer-0\" (UID: \"e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f\") " pod="openstack/ceilometer-0"
Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.718990 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f-scripts\") pod \"ceilometer-0\" (UID: \"e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f\") " pod="openstack/ceilometer-0"
Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.719007 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f\") " pod="openstack/ceilometer-0"
Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.719026 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f\") " pod="openstack/ceilometer-0"
Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.803463 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"]
Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.820711 4728 scope.go:117] "RemoveContainer" containerID="0e7730282b9724a9f5acf4019d6f17483c6d6fbe81837bb2dda34843238a1e8d"
Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.821859 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f-scripts\") pod \"ceilometer-0\" (UID: \"e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f\") " pod="openstack/ceilometer-0"
Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.821908 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f\") " pod="openstack/ceilometer-0"
Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.821943 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f\") " pod="openstack/ceilometer-0"
Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.822074 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f-config-data\") pod \"ceilometer-0\" (UID: \"e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f\") " pod="openstack/ceilometer-0"
Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.822113 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f-log-httpd\") pod \"ceilometer-0\" (UID: \"e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f\") " pod="openstack/ceilometer-0"
Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.822224 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f-run-httpd\") pod \"ceilometer-0\" (UID: \"e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f\") " pod="openstack/ceilometer-0"
Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.822260 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2gprw\" (UniqueName: \"kubernetes.io/projected/e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f-kube-api-access-2gprw\") pod \"ceilometer-0\" (UID: \"e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f\") " pod="openstack/ceilometer-0"
Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.823357 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f-run-httpd\") pod \"ceilometer-0\" (UID: \"e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f\") " pod="openstack/ceilometer-0"
Dec 05 11:30:27 crc kubenswrapper[4728]: W1205 11:30:27.823453 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod36b86337_512e_47a3_80ec_ea9c3b25c3f6.slice/crio-73367f1b31d0cc3924e417e740fd3f3f7967a2b493c4353a2bf41f9d8b32fcb1 WatchSource:0}: Error finding container 73367f1b31d0cc3924e417e740fd3f3f7967a2b493c4353a2bf41f9d8b32fcb1: Status 404 returned error can't find the container with id 73367f1b31d0cc3924e417e740fd3f3f7967a2b493c4353a2bf41f9d8b32fcb1
Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.824468 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f-log-httpd\") pod \"ceilometer-0\" (UID: \"e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f\") " pod="openstack/ceilometer-0"
Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.828617 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f-config-data\") pod \"ceilometer-0\" (UID: \"e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f\") " pod="openstack/ceilometer-0"
Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.829091 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f\") " pod="openstack/ceilometer-0"
Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.830221 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f\") " pod="openstack/ceilometer-0"
Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.831289 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f-scripts\") pod \"ceilometer-0\" (UID: \"e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f\") " pod="openstack/ceilometer-0"
Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.846262 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2gprw\" (UniqueName: \"kubernetes.io/projected/e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f-kube-api-access-2gprw\") pod \"ceilometer-0\" (UID: \"e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f\") " pod="openstack/ceilometer-0"
Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.862909 4728 scope.go:117] "RemoveContainer" containerID="e02df1173130b328a095083b29f0fa2593844521e5456035d1c5fea3103e91ec"
Dec 05 11:30:27 crc kubenswrapper[4728]: E1205 11:30:27.866888 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e02df1173130b328a095083b29f0fa2593844521e5456035d1c5fea3103e91ec\": container with ID starting with e02df1173130b328a095083b29f0fa2593844521e5456035d1c5fea3103e91ec not found: ID does not exist" containerID="e02df1173130b328a095083b29f0fa2593844521e5456035d1c5fea3103e91ec"
Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.866931 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e02df1173130b328a095083b29f0fa2593844521e5456035d1c5fea3103e91ec"} err="failed to get container status \"e02df1173130b328a095083b29f0fa2593844521e5456035d1c5fea3103e91ec\": rpc error: code = NotFound desc = could not find container \"e02df1173130b328a095083b29f0fa2593844521e5456035d1c5fea3103e91ec\": container with ID starting with e02df1173130b328a095083b29f0fa2593844521e5456035d1c5fea3103e91ec not found: ID does not exist"
Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.866961 4728 scope.go:117] "RemoveContainer" containerID="ca5f63c44cd9390148f011388c22d0f4f7300e9187fe267780b4aa9909991f53"
Dec 05 11:30:27 crc kubenswrapper[4728]: E1205 11:30:27.871863 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ca5f63c44cd9390148f011388c22d0f4f7300e9187fe267780b4aa9909991f53\": container with ID starting with ca5f63c44cd9390148f011388c22d0f4f7300e9187fe267780b4aa9909991f53 not found: ID does not exist" containerID="ca5f63c44cd9390148f011388c22d0f4f7300e9187fe267780b4aa9909991f53"
Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.871889 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca5f63c44cd9390148f011388c22d0f4f7300e9187fe267780b4aa9909991f53"} err="failed to get container status \"ca5f63c44cd9390148f011388c22d0f4f7300e9187fe267780b4aa9909991f53\": rpc error: code = NotFound desc = could not find container \"ca5f63c44cd9390148f011388c22d0f4f7300e9187fe267780b4aa9909991f53\": container with ID starting with ca5f63c44cd9390148f011388c22d0f4f7300e9187fe267780b4aa9909991f53 not found: ID does not exist"
Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.871904 4728 scope.go:117] "RemoveContainer" containerID="900224b2993b650f331db1f1a449c72f71c4d12c706517d76a9bd5bdbc624302"
Dec 05 11:30:27 crc kubenswrapper[4728]: E1205 11:30:27.874039 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"900224b2993b650f331db1f1a449c72f71c4d12c706517d76a9bd5bdbc624302\": container with ID starting with 900224b2993b650f331db1f1a449c72f71c4d12c706517d76a9bd5bdbc624302 not found: ID does not exist" containerID="900224b2993b650f331db1f1a449c72f71c4d12c706517d76a9bd5bdbc624302"
Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.874089 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"900224b2993b650f331db1f1a449c72f71c4d12c706517d76a9bd5bdbc624302"} err="failed to get container status \"900224b2993b650f331db1f1a449c72f71c4d12c706517d76a9bd5bdbc624302\": rpc error: code = NotFound desc = could not find container \"900224b2993b650f331db1f1a449c72f71c4d12c706517d76a9bd5bdbc624302\": container with ID starting with 900224b2993b650f331db1f1a449c72f71c4d12c706517d76a9bd5bdbc624302 not found: ID does not exist"
Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.874122 4728 scope.go:117] "RemoveContainer" containerID="0e7730282b9724a9f5acf4019d6f17483c6d6fbe81837bb2dda34843238a1e8d"
Dec 05 11:30:27 crc kubenswrapper[4728]: E1205 11:30:27.875425 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0e7730282b9724a9f5acf4019d6f17483c6d6fbe81837bb2dda34843238a1e8d\": container with ID starting with 0e7730282b9724a9f5acf4019d6f17483c6d6fbe81837bb2dda34843238a1e8d not found: ID does not exist" containerID="0e7730282b9724a9f5acf4019d6f17483c6d6fbe81837bb2dda34843238a1e8d"
Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.875455 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e7730282b9724a9f5acf4019d6f17483c6d6fbe81837bb2dda34843238a1e8d"} err="failed to get container status \"0e7730282b9724a9f5acf4019d6f17483c6d6fbe81837bb2dda34843238a1e8d\": rpc error: code = NotFound desc = could not find container \"0e7730282b9724a9f5acf4019d6f17483c6d6fbe81837bb2dda34843238a1e8d\": container with ID starting with 0e7730282b9724a9f5acf4019d6f17483c6d6fbe81837bb2dda34843238a1e8d not found: ID does not exist"
Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.927075 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume1-0"]
Dec 05 11:30:27 crc kubenswrapper[4728]: I1205 11:30:27.991932 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 11:30:28 crc kubenswrapper[4728]: I1205 11:30:28.178912 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-85ff748b95-kj9k8"
Dec 05 11:30:28 crc kubenswrapper[4728]: I1205 11:30:28.230203 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6cd833b3-79f5-41f4-8d37-07b81c016e23-config\") pod \"6cd833b3-79f5-41f4-8d37-07b81c016e23\" (UID: \"6cd833b3-79f5-41f4-8d37-07b81c016e23\") "
Dec 05 11:30:28 crc kubenswrapper[4728]: I1205 11:30:28.230372 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6cd833b3-79f5-41f4-8d37-07b81c016e23-dns-svc\") pod \"6cd833b3-79f5-41f4-8d37-07b81c016e23\" (UID: \"6cd833b3-79f5-41f4-8d37-07b81c016e23\") "
Dec 05 11:30:28 crc kubenswrapper[4728]: I1205 11:30:28.230573 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6cd833b3-79f5-41f4-8d37-07b81c016e23-dns-swift-storage-0\") pod \"6cd833b3-79f5-41f4-8d37-07b81c016e23\" (UID: \"6cd833b3-79f5-41f4-8d37-07b81c016e23\") "
Dec 05 11:30:28 crc kubenswrapper[4728]: I1205 11:30:28.230629 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6cd833b3-79f5-41f4-8d37-07b81c016e23-ovsdbserver-nb\") pod \"6cd833b3-79f5-41f4-8d37-07b81c016e23\" (UID: \"6cd833b3-79f5-41f4-8d37-07b81c016e23\") "
Dec 05 11:30:28 crc kubenswrapper[4728]: I1205 11:30:28.230659 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qnstr\" (UniqueName: \"kubernetes.io/projected/6cd833b3-79f5-41f4-8d37-07b81c016e23-kube-api-access-qnstr\") pod \"6cd833b3-79f5-41f4-8d37-07b81c016e23\" (UID: \"6cd833b3-79f5-41f4-8d37-07b81c016e23\") "
Dec 05 11:30:28 crc kubenswrapper[4728]: I1205 11:30:28.231186 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6cd833b3-79f5-41f4-8d37-07b81c016e23-ovsdbserver-sb\") pod \"6cd833b3-79f5-41f4-8d37-07b81c016e23\" (UID: \"6cd833b3-79f5-41f4-8d37-07b81c016e23\") "
Dec 05 11:30:28 crc kubenswrapper[4728]: I1205 11:30:28.246472 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6cd833b3-79f5-41f4-8d37-07b81c016e23-kube-api-access-qnstr" (OuterVolumeSpecName: "kube-api-access-qnstr") pod "6cd833b3-79f5-41f4-8d37-07b81c016e23" (UID: "6cd833b3-79f5-41f4-8d37-07b81c016e23"). InnerVolumeSpecName "kube-api-access-qnstr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:30:28 crc kubenswrapper[4728]: I1205 11:30:28.320353 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6cd833b3-79f5-41f4-8d37-07b81c016e23-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "6cd833b3-79f5-41f4-8d37-07b81c016e23" (UID: "6cd833b3-79f5-41f4-8d37-07b81c016e23"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:30:28 crc kubenswrapper[4728]: I1205 11:30:28.341735 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qnstr\" (UniqueName: \"kubernetes.io/projected/6cd833b3-79f5-41f4-8d37-07b81c016e23-kube-api-access-qnstr\") on node \"crc\" DevicePath \"\""
Dec 05 11:30:28 crc kubenswrapper[4728]: I1205 11:30:28.341775 4728 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6cd833b3-79f5-41f4-8d37-07b81c016e23-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Dec 05 11:30:28 crc kubenswrapper[4728]: I1205 11:30:28.346387 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6cd833b3-79f5-41f4-8d37-07b81c016e23-config" (OuterVolumeSpecName: "config") pod "6cd833b3-79f5-41f4-8d37-07b81c016e23" (UID: "6cd833b3-79f5-41f4-8d37-07b81c016e23"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:30:28 crc kubenswrapper[4728]: I1205 11:30:28.392468 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6cd833b3-79f5-41f4-8d37-07b81c016e23-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "6cd833b3-79f5-41f4-8d37-07b81c016e23" (UID: "6cd833b3-79f5-41f4-8d37-07b81c016e23"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:30:28 crc kubenswrapper[4728]: I1205 11:30:28.420158 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dbac4a04-13e4-43e5-a221-3faec9e9fac7" path="/var/lib/kubelet/pods/dbac4a04-13e4-43e5-a221-3faec9e9fac7/volumes"
Dec 05 11:30:28 crc kubenswrapper[4728]: I1205 11:30:28.439250 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6cd833b3-79f5-41f4-8d37-07b81c016e23-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "6cd833b3-79f5-41f4-8d37-07b81c016e23" (UID: "6cd833b3-79f5-41f4-8d37-07b81c016e23"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:30:28 crc kubenswrapper[4728]: I1205 11:30:28.439592 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6cd833b3-79f5-41f4-8d37-07b81c016e23-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "6cd833b3-79f5-41f4-8d37-07b81c016e23" (UID: "6cd833b3-79f5-41f4-8d37-07b81c016e23"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:30:28 crc kubenswrapper[4728]: I1205 11:30:28.447937 4728 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6cd833b3-79f5-41f4-8d37-07b81c016e23-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Dec 05 11:30:28 crc kubenswrapper[4728]: I1205 11:30:28.449191 4728 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6cd833b3-79f5-41f4-8d37-07b81c016e23-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Dec 05 11:30:28 crc kubenswrapper[4728]: I1205 11:30:28.449278 4728 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6cd833b3-79f5-41f4-8d37-07b81c016e23-config\") on node \"crc\" DevicePath \"\""
Dec 05 11:30:28 crc kubenswrapper[4728]: I1205 11:30:28.449386 4728 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6cd833b3-79f5-41f4-8d37-07b81c016e23-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 05 11:30:28 crc kubenswrapper[4728]: I1205 11:30:28.521389 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 11:30:28 crc kubenswrapper[4728]: I1205 11:30:28.546926 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"3f719999-317e-4c1f-9d0d-53c9d1277e14","Type":"ContainerStarted","Data":"f524fdce08c1f6cad215a756b52bf41b478474abefde28ab4043e41ff28ae4d0"}
Dec 05 11:30:28 crc kubenswrapper[4728]: I1205 11:30:28.548324 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"a3771ab3-bbe0-4af1-af70-01155203ec37","Type":"ContainerStarted","Data":"d92714cb021d20509909d4edbedd043a083a3688e8c692935196671190eecf24"}
Dec 05 11:30:28 crc kubenswrapper[4728]: I1205 11:30:28.549334 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-798767f9fd-kfrfz" event={"ID":"24fcf86b-13a3-46c0-bea6-37ef4da29b48","Type":"ContainerStarted","Data":"92f32a47e856f346936aede709a9d70f3ec35b4d8c8d16b34710c51e0bc348b1"}
Dec 05 11:30:28 crc kubenswrapper[4728]: I1205 11:30:28.549356 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-798767f9fd-kfrfz" event={"ID":"24fcf86b-13a3-46c0-bea6-37ef4da29b48","Type":"ContainerStarted","Data":"9d7eb1458fbca8e5f0abaab29b5c1eca58810a0097839a35f3ee389668b8fb6d"}
Dec 05 11:30:28 crc kubenswrapper[4728]: I1205 11:30:28.550417 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-76974b5d9d-khzwj" event={"ID":"37ac3cc7-ebdb-4ba9-97b6-c62b482b49c2","Type":"ContainerStarted","Data":"2c92a995e657645a86b7f42d925e85f43590b7026c091af85bd7d9920b3a6294"}
Dec 05 11:30:28 crc kubenswrapper[4728]: I1205 11:30:28.554417 4728 generic.go:334] "Generic (PLEG): container finished" podID="6cd833b3-79f5-41f4-8d37-07b81c016e23" containerID="6b729350de6fe3793305ea2f19e4f2b8e4a63f24e957ddc940b779b2d6841118" exitCode=0
Dec 05 11:30:28 crc kubenswrapper[4728]: I1205 11:30:28.554488 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85ff748b95-kj9k8" event={"ID":"6cd833b3-79f5-41f4-8d37-07b81c016e23","Type":"ContainerDied","Data":"6b729350de6fe3793305ea2f19e4f2b8e4a63f24e957ddc940b779b2d6841118"}
Dec 05 11:30:28 crc kubenswrapper[4728]: I1205 11:30:28.554520 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85ff748b95-kj9k8" event={"ID":"6cd833b3-79f5-41f4-8d37-07b81c016e23","Type":"ContainerDied","Data":"0d0465bd2e607458d468524a8b64606b1ad81542c15034a64b424e491b680b42"}
Dec 05 11:30:28 crc kubenswrapper[4728]: I1205 11:30:28.554537 4728 scope.go:117] "RemoveContainer" containerID="6b729350de6fe3793305ea2f19e4f2b8e4a63f24e957ddc940b779b2d6841118"
Dec 05 11:30:28 crc kubenswrapper[4728]: I1205 11:30:28.554653 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-85ff748b95-kj9k8"
Dec 05 11:30:28 crc kubenswrapper[4728]: I1205 11:30:28.585389 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-76974b5d9d-khzwj" podStartSLOduration=3.278855986 podStartE2EDuration="6.585364277s" podCreationTimestamp="2025-12-05 11:30:22 +0000 UTC" firstStartedPulling="2025-12-05 11:30:23.414456644 +0000 UTC m=+1357.556579337" lastFinishedPulling="2025-12-05 11:30:26.720964935 +0000 UTC m=+1360.863087628" observedRunningTime="2025-12-05 11:30:28.570164325 +0000 UTC m=+1362.712287028" watchObservedRunningTime="2025-12-05 11:30:28.585364277 +0000 UTC m=+1362.727486970"
Dec 05 11:30:28 crc kubenswrapper[4728]: I1205 11:30:28.592609 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"44b0cb2c-149f-4a9d-a494-cb9542391b4f","Type":"ContainerStarted","Data":"5b78811fad174433d7565eb792615c95d0daa3bce86bb98e959b2fdcee9a64e2"}
Dec 05 11:30:28 crc kubenswrapper[4728]: I1205 11:30:28.596455 4728 generic.go:334] "Generic (PLEG): container finished" podID="296a9db8-738c-4a95-87ed-3037d5b6ddf5" containerID="892799e7cfa71a099c9020b214b671957a638cacad6b70b2f97af52abf6e69b2" exitCode=0
Dec 05 11:30:28 crc kubenswrapper[4728]: I1205 11:30:28.596523 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-xxm6x" event={"ID":"296a9db8-738c-4a95-87ed-3037d5b6ddf5","Type":"ContainerDied","Data":"892799e7cfa71a099c9020b214b671957a638cacad6b70b2f97af52abf6e69b2"}
Dec 05 11:30:28 crc kubenswrapper[4728]: I1205 11:30:28.596550 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-xxm6x" event={"ID":"296a9db8-738c-4a95-87ed-3037d5b6ddf5","Type":"ContainerStarted","Data":"7cda411cf3b26d6f6740e94690883b9e9c3b2f260e17e6f1e4d0aa3f7acdc75b"}
Dec 05 11:30:28 crc kubenswrapper[4728]: I1205 11:30:28.616106 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"36b86337-512e-47a3-80ec-ea9c3b25c3f6","Type":"ContainerStarted","Data":"73367f1b31d0cc3924e417e740fd3f3f7967a2b493c4353a2bf41f9d8b32fcb1"}
Dec 05 11:30:28 crc kubenswrapper[4728]: I1205 11:30:28.618755 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-kj9k8"]
Dec 05 11:30:28 crc kubenswrapper[4728]: I1205 11:30:28.628022 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-kj9k8"]
Dec 05 11:30:28 crc kubenswrapper[4728]: I1205 11:30:28.647897 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-7f87fccb55-qhksr" event={"ID":"2b11e743-92a0-4601-8cdf-935c3cc54a55","Type":"ContainerStarted","Data":"a15111bb7929fa26c86256ac88226da9f433c6a95916e2724c25cd472f5e8c46"}
Dec 05 11:30:28 crc kubenswrapper[4728]: I1205 11:30:28.666949 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-7f87fccb55-qhksr" podStartSLOduration=3.077330449 podStartE2EDuration="6.666929618s" podCreationTimestamp="2025-12-05 11:30:22 +0000 UTC" firstStartedPulling="2025-12-05 11:30:23.121870844 +0000 UTC m=+1357.263993537" lastFinishedPulling="2025-12-05 11:30:26.711470013 +0000 UTC m=+1360.853592706" observedRunningTime="2025-12-05 11:30:28.663810165 +0000 UTC m=+1362.805932858" watchObservedRunningTime="2025-12-05 11:30:28.666929618 +0000 UTC m=+1362.809052311"
Dec 05 11:30:28 crc kubenswrapper[4728]: I1205 11:30:28.739615 4728 scope.go:117] "RemoveContainer" containerID="97494aee051f73b4559160ae835f8908accc5dbec90c6733ae3fe69b27cc8e73"
Dec 05 11:30:28 crc kubenswrapper[4728]: I1205 11:30:28.945988 4728 scope.go:117] "RemoveContainer" containerID="6b729350de6fe3793305ea2f19e4f2b8e4a63f24e957ddc940b779b2d6841118"
Dec 05 11:30:28 crc kubenswrapper[4728]: E1205 11:30:28.946431 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6b729350de6fe3793305ea2f19e4f2b8e4a63f24e957ddc940b779b2d6841118\": container with ID starting with 6b729350de6fe3793305ea2f19e4f2b8e4a63f24e957ddc940b779b2d6841118 not found: ID does not exist" containerID="6b729350de6fe3793305ea2f19e4f2b8e4a63f24e957ddc940b779b2d6841118"
Dec 05 11:30:28 crc kubenswrapper[4728]: I1205 11:30:28.946470 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b729350de6fe3793305ea2f19e4f2b8e4a63f24e957ddc940b779b2d6841118"} err="failed to get container status \"6b729350de6fe3793305ea2f19e4f2b8e4a63f24e957ddc940b779b2d6841118\": rpc error: code = NotFound desc = could not find container \"6b729350de6fe3793305ea2f19e4f2b8e4a63f24e957ddc940b779b2d6841118\": container with ID starting with 6b729350de6fe3793305ea2f19e4f2b8e4a63f24e957ddc940b779b2d6841118 not found: ID does not exist"
Dec 05 11:30:28 crc kubenswrapper[4728]: I1205 11:30:28.946498 4728 scope.go:117] "RemoveContainer" containerID="97494aee051f73b4559160ae835f8908accc5dbec90c6733ae3fe69b27cc8e73"
Dec 05 11:30:28 crc kubenswrapper[4728]: E1205 11:30:28.946888 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"97494aee051f73b4559160ae835f8908accc5dbec90c6733ae3fe69b27cc8e73\": container with ID starting with 97494aee051f73b4559160ae835f8908accc5dbec90c6733ae3fe69b27cc8e73 not found: ID does not exist" containerID="97494aee051f73b4559160ae835f8908accc5dbec90c6733ae3fe69b27cc8e73"
Dec 05 11:30:28 crc kubenswrapper[4728]: I1205 11:30:28.946937 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"97494aee051f73b4559160ae835f8908accc5dbec90c6733ae3fe69b27cc8e73"} err="failed to get container status \"97494aee051f73b4559160ae835f8908accc5dbec90c6733ae3fe69b27cc8e73\": rpc error: code = NotFound desc = could not find container \"97494aee051f73b4559160ae835f8908accc5dbec90c6733ae3fe69b27cc8e73\": container with ID starting with 97494aee051f73b4559160ae835f8908accc5dbec90c6733ae3fe69b27cc8e73 not found: ID does not exist"
Dec 05 11:30:29 crc kubenswrapper[4728]: I1205 11:30:29.086521 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"]
Dec 05 11:30:29 crc kubenswrapper[4728]: I1205 11:30:29.736458 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"a3771ab3-bbe0-4af1-af70-01155203ec37","Type":"ContainerStarted","Data":"f55387ee5b3efd1c45350430b8c7419f48deb63ae51d6391c13024c94cfc4cf8"}
Dec 05 11:30:29 crc kubenswrapper[4728]: I1205 11:30:29.791238 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"36b86337-512e-47a3-80ec-ea9c3b25c3f6","Type":"ContainerStarted","Data":"6f6a28abdca56c20264691e4ade4b122fdbe08a6d995e8a70b343e5f887a512c"}
Dec 05 11:30:29 crc kubenswrapper[4728]: I1205 11:30:29.814453 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-798767f9fd-kfrfz" event={"ID":"24fcf86b-13a3-46c0-bea6-37ef4da29b48","Type":"ContainerStarted","Data":"12f3ecd0ce4280a1104d469b90a37e57f1d2c0d7467146f6503a3962665119b6"}
Dec 05 11:30:29 crc kubenswrapper[4728]: I1205 11:30:29.815656 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-798767f9fd-kfrfz"
Dec 05 11:30:29 crc kubenswrapper[4728]: I1205 11:30:29.815677 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-798767f9fd-kfrfz"
Dec 05 11:30:29 crc kubenswrapper[4728]: I1205 11:30:29.837059 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f","Type":"ContainerStarted","Data":"34573b7eccd412b610bbb3790db86f1f08430e7fb7d3f13a8dd085daca5c02c4"}
Dec 05 11:30:29 crc kubenswrapper[4728]: I1205 11:30:29.855394 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-798767f9fd-kfrfz" podStartSLOduration=4.855379097 podStartE2EDuration="4.855379097s" podCreationTimestamp="2025-12-05 11:30:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:30:29.853634151 +0000 UTC m=+1363.995756864" watchObservedRunningTime="2025-12-05 11:30:29.855379097 +0000 UTC m=+1363.997501780"
Dec 05 11:30:29 crc kubenswrapper[4728]: I1205 11:30:29.866085 4728 generic.go:334] "Generic (PLEG): container finished" podID="b2657bde-2b78-49b2-bb33-a085f5a42024" containerID="a424612358c0338ec1134ced5d16490f2c4b85530adaef3d897eb3a80f1f2e77" exitCode=0
Dec 05 11:30:29 crc kubenswrapper[4728]: I1205 11:30:29.866206 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-76476c596-dh9zg" event={"ID":"b2657bde-2b78-49b2-bb33-a085f5a42024","Type":"ContainerDied","Data":"a424612358c0338ec1134ced5d16490f2c4b85530adaef3d897eb3a80f1f2e77"}
Dec 05 11:30:29 crc kubenswrapper[4728]: I1205 11:30:29.870093 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-xxm6x" event={"ID":"296a9db8-738c-4a95-87ed-3037d5b6ddf5","Type":"ContainerStarted","Data":"6dd9d3a34dedd9407e02a156620c7d206a6463ca97d95c323314e1a916f07809"}
Dec 05 11:30:29 crc kubenswrapper[4728]: I1205 11:30:29.870657 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5c9776ccc5-xxm6x"
Dec 05 11:30:29 crc kubenswrapper[4728]: I1205 11:30:29.915025 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5c9776ccc5-xxm6x" podStartSLOduration=4.915003477 podStartE2EDuration="4.915003477s" podCreationTimestamp="2025-12-05 11:30:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:30:29.910785045 +0000 UTC m=+1364.052907738" watchObservedRunningTime="2025-12-05 11:30:29.915003477 +0000 UTC m=+1364.057126190"
Dec 05 11:30:30 crc kubenswrapper[4728]: I1205 11:30:30.374979 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6cd833b3-79f5-41f4-8d37-07b81c016e23" path="/var/lib/kubelet/pods/6cd833b3-79f5-41f4-8d37-07b81c016e23/volumes"
Dec 05 11:30:30 crc kubenswrapper[4728]: I1205 11:30:30.423482 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-76476c596-dh9zg"
Dec 05 11:30:30 crc kubenswrapper[4728]: I1205 11:30:30.598430 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6d6466fdb6-7fhbh"
Dec 05 11:30:30 crc kubenswrapper[4728]: I1205 11:30:30.610498 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tn7zj\" (UniqueName: \"kubernetes.io/projected/b2657bde-2b78-49b2-bb33-a085f5a42024-kube-api-access-tn7zj\") pod \"b2657bde-2b78-49b2-bb33-a085f5a42024\" (UID: \"b2657bde-2b78-49b2-bb33-a085f5a42024\") "
Dec 05 11:30:30 crc kubenswrapper[4728]: I1205 11:30:30.610623 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2657bde-2b78-49b2-bb33-a085f5a42024-combined-ca-bundle\") pod \"b2657bde-2b78-49b2-bb33-a085f5a42024\" (UID: \"b2657bde-2b78-49b2-bb33-a085f5a42024\") "
Dec 05 11:30:30 crc kubenswrapper[4728]: I1205 11:30:30.610676 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/b2657bde-2b78-49b2-bb33-a085f5a42024-httpd-config\") pod \"b2657bde-2b78-49b2-bb33-a085f5a42024\" (UID: \"b2657bde-2b78-49b2-bb33-a085f5a42024\") "
Dec 05 11:30:30 crc kubenswrapper[4728]: I1205 11:30:30.610732 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b2657bde-2b78-49b2-bb33-a085f5a42024-config\") pod \"b2657bde-2b78-49b2-bb33-a085f5a42024\" (UID: \"b2657bde-2b78-49b2-bb33-a085f5a42024\") "
Dec 05 11:30:30 crc kubenswrapper[4728]: I1205 11:30:30.610835 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2657bde-2b78-49b2-bb33-a085f5a42024-ovndb-tls-certs\") pod \"b2657bde-2b78-49b2-bb33-a085f5a42024\" (UID: \"b2657bde-2b78-49b2-bb33-a085f5a42024\") "
Dec 05 11:30:30 crc kubenswrapper[4728]: I1205 11:30:30.620567 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b2657bde-2b78-49b2-bb33-a085f5a42024-kube-api-access-tn7zj" (OuterVolumeSpecName: "kube-api-access-tn7zj") pod "b2657bde-2b78-49b2-bb33-a085f5a42024" (UID: "b2657bde-2b78-49b2-bb33-a085f5a42024"). InnerVolumeSpecName "kube-api-access-tn7zj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:30:30 crc kubenswrapper[4728]: I1205 11:30:30.634266 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b2657bde-2b78-49b2-bb33-a085f5a42024-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "b2657bde-2b78-49b2-bb33-a085f5a42024" (UID: "b2657bde-2b78-49b2-bb33-a085f5a42024"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:30:30 crc kubenswrapper[4728]: I1205 11:30:30.714383 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tn7zj\" (UniqueName: \"kubernetes.io/projected/b2657bde-2b78-49b2-bb33-a085f5a42024-kube-api-access-tn7zj\") on node \"crc\" DevicePath \"\""
Dec 05 11:30:30 crc kubenswrapper[4728]: I1205 11:30:30.714413 4728 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/b2657bde-2b78-49b2-bb33-a085f5a42024-httpd-config\") on node \"crc\" DevicePath \"\""
Dec 05 11:30:30 crc kubenswrapper[4728]: I1205 11:30:30.729889 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b2657bde-2b78-49b2-bb33-a085f5a42024-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b2657bde-2b78-49b2-bb33-a085f5a42024" (UID: "b2657bde-2b78-49b2-bb33-a085f5a42024"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:30:30 crc kubenswrapper[4728]: I1205 11:30:30.760916 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b2657bde-2b78-49b2-bb33-a085f5a42024-config" (OuterVolumeSpecName: "config") pod "b2657bde-2b78-49b2-bb33-a085f5a42024" (UID: "b2657bde-2b78-49b2-bb33-a085f5a42024"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:30:30 crc kubenswrapper[4728]: I1205 11:30:30.788422 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b2657bde-2b78-49b2-bb33-a085f5a42024-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "b2657bde-2b78-49b2-bb33-a085f5a42024" (UID: "b2657bde-2b78-49b2-bb33-a085f5a42024"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:30:30 crc kubenswrapper[4728]: I1205 11:30:30.816102 4728 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2657bde-2b78-49b2-bb33-a085f5a42024-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 11:30:30 crc kubenswrapper[4728]: I1205 11:30:30.816379 4728 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/b2657bde-2b78-49b2-bb33-a085f5a42024-config\") on node \"crc\" DevicePath \"\""
Dec 05 11:30:30 crc kubenswrapper[4728]: I1205 11:30:30.816389 4728 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2657bde-2b78-49b2-bb33-a085f5a42024-ovndb-tls-certs\") on node \"crc\" DevicePath \"\""
Dec 05 11:30:30 crc kubenswrapper[4728]: I1205 11:30:30.932748 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"44b0cb2c-149f-4a9d-a494-cb9542391b4f","Type":"ContainerStarted","Data":"2e32be107c6c7a3cc13b2c62a76dd6b7745d7de8931f65993b5363cf07b691ee"}
Dec 05 11:30:30 crc kubenswrapper[4728]: I1205 11:30:30.951580 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-76476c596-dh9zg" event={"ID":"b2657bde-2b78-49b2-bb33-a085f5a42024","Type":"ContainerDied","Data":"bada9909a6a41cb02ba67ceeafaecb0c6b4a6a02ce584400e5f5535421f40b41"}
Dec 05 11:30:30 crc kubenswrapper[4728]: I1205 11:30:30.951629 4728 scope.go:117] "RemoveContainer" containerID="38d755236e4efaf1855f3e775b70540d8fa5c965fa203ac9cdcacaa126da088d"
Dec 05 11:30:30 crc kubenswrapper[4728]: I1205 11:30:30.951834 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-76476c596-dh9zg"
Dec 05 11:30:30 crc kubenswrapper[4728]: I1205 11:30:30.953905 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"3f719999-317e-4c1f-9d0d-53c9d1277e14","Type":"ContainerStarted","Data":"425007df221e112a565a0188f7d6bd3e60cdbec2e9a8b8d7180f5eb19aa8a152"}
Dec 05 11:30:30 crc kubenswrapper[4728]: I1205 11:30:30.953942 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"3f719999-317e-4c1f-9d0d-53c9d1277e14","Type":"ContainerStarted","Data":"a8bf728812863067c249dc15bb0b97b2388d44dc7ff636fd27cfdc36b914e72c"}
Dec 05 11:30:30 crc kubenswrapper[4728]: I1205 11:30:30.963300 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"a3771ab3-bbe0-4af1-af70-01155203ec37","Type":"ContainerStarted","Data":"a0fa70a421a579bea670c41362eb3ff3a96d194d5e8819e4542494f4411026f6"}
Dec 05 11:30:30 crc kubenswrapper[4728]: I1205 11:30:30.963470 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="a3771ab3-bbe0-4af1-af70-01155203ec37" containerName="cinder-api-log" containerID="cri-o://f55387ee5b3efd1c45350430b8c7419f48deb63ae51d6391c13024c94cfc4cf8" gracePeriod=30
Dec 05 11:30:30 crc kubenswrapper[4728]: I1205 11:30:30.963668 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0"
Dec 05 11:30:30 crc kubenswrapper[4728]: I1205 11:30:30.963747 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="a3771ab3-bbe0-4af1-af70-01155203ec37" containerName="cinder-api" containerID="cri-o://a0fa70a421a579bea670c41362eb3ff3a96d194d5e8819e4542494f4411026f6" gracePeriod=30
Dec 05 11:30:31 crc kubenswrapper[4728]: I1205 11:30:31.019311 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"36b86337-512e-47a3-80ec-ea9c3b25c3f6","Type":"ContainerStarted","Data":"ab74e70e8fd0007fdf95b3b591eb20574d53752908e920d12a501578810735db"}
Dec 05 11:30:31 crc kubenswrapper[4728]: I1205 11:30:31.027901 4728 scope.go:117] "RemoveContainer" containerID="a424612358c0338ec1134ced5d16490f2c4b85530adaef3d897eb3a80f1f2e77"
Dec 05 11:30:31 crc kubenswrapper[4728]: I1205 11:30:31.031859 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-volume-volume1-0" podStartSLOduration=4.921829309 podStartE2EDuration="6.031834189s" podCreationTimestamp="2025-12-05 11:30:25 +0000 UTC" firstStartedPulling="2025-12-05 11:30:27.968941571 +0000 UTC m=+1362.111064264" lastFinishedPulling="2025-12-05 11:30:29.078946451 +0000 UTC m=+1363.221069144" observedRunningTime="2025-12-05 11:30:31.005232824 +0000 UTC m=+1365.147355527" watchObservedRunningTime="2025-12-05 11:30:31.031834189 +0000 UTC m=+1365.173956882"
Dec 05 11:30:31 crc kubenswrapper[4728]: I1205 11:30:31.038327 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=6.03830857 podStartE2EDuration="6.03830857s" podCreationTimestamp="2025-12-05 11:30:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:30:31.034822208 +0000 UTC m=+1365.176944901" watchObservedRunningTime="2025-12-05 11:30:31.03830857 +0000 UTC m=+1365.180431263"
Dec 05 11:30:31 crc kubenswrapper[4728]: I1205 11:30:31.045805 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f","Type":"ContainerStarted","Data":"9abebf2f45df1f6cb123002e4ef7f9188cc958011fd013fb816cf7d5beae1565"}
Dec 05 11:30:31 crc kubenswrapper[4728]: I1205 11:30:31.094920 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-backup-0" podStartSLOduration=4.923359829 podStartE2EDuration="6.094895159s" podCreationTimestamp="2025-12-05 11:30:25 +0000 UTC" firstStartedPulling="2025-12-05 11:30:27.827475484 +0000 UTC m=+1361.969598177" lastFinishedPulling="2025-12-05 11:30:28.999010814 +0000 UTC m=+1363.141133507" observedRunningTime="2025-12-05 11:30:31.078911096 +0000 UTC m=+1365.221033809" watchObservedRunningTime="2025-12-05 11:30:31.094895159 +0000 UTC m=+1365.237017852"
Dec 05 11:30:31 crc kubenswrapper[4728]: I1205 11:30:31.137022 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-76476c596-dh9zg"]
Dec 05 11:30:31 crc kubenswrapper[4728]: I1205 11:30:31.142880 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-volume-volume1-0"
Dec 05 11:30:31 crc kubenswrapper[4728]: I1205 11:30:31.147634 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-76476c596-dh9zg"]
Dec 05 11:30:31 crc kubenswrapper[4728]: I1205 11:30:31.301146 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-backup-0"
Dec 05 11:30:32 crc kubenswrapper[4728]: I1205 11:30:32.053766 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f","Type":"ContainerStarted","Data":"129b9328641f752d80d23c6938388874ff7c60006f7a0efdc7c1d0b697ae5cfb"}
Dec 05 11:30:32 crc kubenswrapper[4728]: I1205 11:30:32.056279 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"44b0cb2c-149f-4a9d-a494-cb9542391b4f","Type":"ContainerStarted","Data":"794b1f4b7e349796e5084b3141fc0d98aba909f56faa9534d2a57dfe8e876706"}
Dec 05 11:30:32 crc kubenswrapper[4728]: I1205 11:30:32.061641 4728 generic.go:334] "Generic (PLEG): container finished" podID="a3771ab3-bbe0-4af1-af70-01155203ec37" containerID="f55387ee5b3efd1c45350430b8c7419f48deb63ae51d6391c13024c94cfc4cf8" exitCode=143
Dec 05 11:30:32 crc kubenswrapper[4728]: I1205 11:30:32.061747 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"a3771ab3-bbe0-4af1-af70-01155203ec37","Type":"ContainerDied","Data":"f55387ee5b3efd1c45350430b8c7419f48deb63ae51d6391c13024c94cfc4cf8"}
Dec 05 11:30:32 crc kubenswrapper[4728]: I1205 11:30:32.087959 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=5.214785602 podStartE2EDuration="7.087942407s" podCreationTimestamp="2025-12-05 11:30:25 +0000 UTC" firstStartedPulling="2025-12-05 11:30:27.558843348 +0000 UTC m=+1361.700966041" lastFinishedPulling="2025-12-05 11:30:29.432000153 +0000 UTC m=+1363.574122846" observedRunningTime="2025-12-05 11:30:32.078433207 +0000 UTC m=+1366.220555920" watchObservedRunningTime="2025-12-05 11:30:32.087942407 +0000 UTC m=+1366.230065100"
Dec 05 11:30:32 crc kubenswrapper[4728]: I1205 11:30:32.362708 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b2657bde-2b78-49b2-bb33-a085f5a42024" path="/var/lib/kubelet/pods/b2657bde-2b78-49b2-bb33-a085f5a42024/volumes"
Dec 05 11:30:32 crc kubenswrapper[4728]: I1205 11:30:32.730562 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6d6466fdb6-7fhbh"
Dec 05 11:30:33 crc kubenswrapper[4728]: I1205 11:30:33.076833 4728 generic.go:334] "Generic (PLEG): container finished" podID="a3771ab3-bbe0-4af1-af70-01155203ec37" containerID="a0fa70a421a579bea670c41362eb3ff3a96d194d5e8819e4542494f4411026f6" exitCode=0
Dec 05 11:30:33 crc kubenswrapper[4728]: I1205 11:30:33.076923 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"a3771ab3-bbe0-4af1-af70-01155203ec37","Type":"ContainerDied","Data":"a0fa70a421a579bea670c41362eb3ff3a96d194d5e8819e4542494f4411026f6"}
Dec 05 11:30:33 crc kubenswrapper[4728]: I1205 11:30:33.861593 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Dec 05 11:30:33 crc kubenswrapper[4728]: I1205 11:30:33.916284 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-5d7bdb6c68-cfbgd"
Dec 05 11:30:33 crc kubenswrapper[4728]: I1205 11:30:33.917468 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-7755888bd8-shzsv"
Dec 05 11:30:33 crc kubenswrapper[4728]: I1205 11:30:33.976368 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g4wnt\" (UniqueName: \"kubernetes.io/projected/a3771ab3-bbe0-4af1-af70-01155203ec37-kube-api-access-g4wnt\") pod \"a3771ab3-bbe0-4af1-af70-01155203ec37\" (UID: \"a3771ab3-bbe0-4af1-af70-01155203ec37\") "
Dec 05 11:30:33 crc kubenswrapper[4728]: I1205 11:30:33.976767 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a3771ab3-bbe0-4af1-af70-01155203ec37-logs\") pod \"a3771ab3-bbe0-4af1-af70-01155203ec37\" (UID: \"a3771ab3-bbe0-4af1-af70-01155203ec37\") "
Dec 05 11:30:33 crc kubenswrapper[4728]: I1205 11:30:33.977410 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a3771ab3-bbe0-4af1-af70-01155203ec37-logs" (OuterVolumeSpecName: "logs") pod "a3771ab3-bbe0-4af1-af70-01155203ec37" (UID: "a3771ab3-bbe0-4af1-af70-01155203ec37"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 11:30:33 crc kubenswrapper[4728]: I1205 11:30:33.977482 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a3771ab3-bbe0-4af1-af70-01155203ec37-scripts\") pod \"a3771ab3-bbe0-4af1-af70-01155203ec37\" (UID: \"a3771ab3-bbe0-4af1-af70-01155203ec37\") "
Dec 05 11:30:33 crc kubenswrapper[4728]: I1205 11:30:33.977520 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a3771ab3-bbe0-4af1-af70-01155203ec37-etc-machine-id\") pod \"a3771ab3-bbe0-4af1-af70-01155203ec37\" (UID: \"a3771ab3-bbe0-4af1-af70-01155203ec37\") "
Dec 05 11:30:33 crc kubenswrapper[4728]: I1205 11:30:33.977676 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3771ab3-bbe0-4af1-af70-01155203ec37-config-data\") pod \"a3771ab3-bbe0-4af1-af70-01155203ec37\" (UID: \"a3771ab3-bbe0-4af1-af70-01155203ec37\") "
Dec 05 11:30:33 crc kubenswrapper[4728]: I1205 11:30:33.977697 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3771ab3-bbe0-4af1-af70-01155203ec37-combined-ca-bundle\") pod \"a3771ab3-bbe0-4af1-af70-01155203ec37\" (UID: \"a3771ab3-bbe0-4af1-af70-01155203ec37\") "
Dec 05 11:30:33 crc kubenswrapper[4728]: I1205 11:30:33.977734 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a3771ab3-bbe0-4af1-af70-01155203ec37-config-data-custom\") pod \"a3771ab3-bbe0-4af1-af70-01155203ec37\" (UID: \"a3771ab3-bbe0-4af1-af70-01155203ec37\") "
Dec 05 11:30:33 crc kubenswrapper[4728]: I1205 11:30:33.977900 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a3771ab3-bbe0-4af1-af70-01155203ec37-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "a3771ab3-bbe0-4af1-af70-01155203ec37" (UID: "a3771ab3-bbe0-4af1-af70-01155203ec37"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 11:30:33 crc kubenswrapper[4728]: I1205 11:30:33.978409 4728 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a3771ab3-bbe0-4af1-af70-01155203ec37-logs\") on node \"crc\" DevicePath \"\""
Dec 05 11:30:33 crc kubenswrapper[4728]: I1205 11:30:33.978427 4728 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a3771ab3-bbe0-4af1-af70-01155203ec37-etc-machine-id\") on node \"crc\" DevicePath \"\""
Dec 05 11:30:33 crc kubenswrapper[4728]: I1205 11:30:33.988013 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3771ab3-bbe0-4af1-af70-01155203ec37-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "a3771ab3-bbe0-4af1-af70-01155203ec37" (UID: "a3771ab3-bbe0-4af1-af70-01155203ec37"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:30:33 crc kubenswrapper[4728]: I1205 11:30:33.988284 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3771ab3-bbe0-4af1-af70-01155203ec37-scripts" (OuterVolumeSpecName: "scripts") pod "a3771ab3-bbe0-4af1-af70-01155203ec37" (UID: "a3771ab3-bbe0-4af1-af70-01155203ec37"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:30:33 crc kubenswrapper[4728]: I1205 11:30:33.989060 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a3771ab3-bbe0-4af1-af70-01155203ec37-kube-api-access-g4wnt" (OuterVolumeSpecName: "kube-api-access-g4wnt") pod "a3771ab3-bbe0-4af1-af70-01155203ec37" (UID: "a3771ab3-bbe0-4af1-af70-01155203ec37"). InnerVolumeSpecName "kube-api-access-g4wnt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:30:34 crc kubenswrapper[4728]: I1205 11:30:34.081625 4728 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a3771ab3-bbe0-4af1-af70-01155203ec37-config-data-custom\") on node \"crc\" DevicePath \"\""
Dec 05 11:30:34 crc kubenswrapper[4728]: I1205 11:30:34.081666 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g4wnt\" (UniqueName: \"kubernetes.io/projected/a3771ab3-bbe0-4af1-af70-01155203ec37-kube-api-access-g4wnt\") on node \"crc\" DevicePath \"\""
Dec 05 11:30:34 crc kubenswrapper[4728]: I1205 11:30:34.081684 4728 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a3771ab3-bbe0-4af1-af70-01155203ec37-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 11:30:34 crc kubenswrapper[4728]: I1205 11:30:34.104102 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3771ab3-bbe0-4af1-af70-01155203ec37-config-data" (OuterVolumeSpecName: "config-data") pod "a3771ab3-bbe0-4af1-af70-01155203ec37" (UID: "a3771ab3-bbe0-4af1-af70-01155203ec37"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:30:34 crc kubenswrapper[4728]: I1205 11:30:34.116534 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"a3771ab3-bbe0-4af1-af70-01155203ec37","Type":"ContainerDied","Data":"d92714cb021d20509909d4edbedd043a083a3688e8c692935196671190eecf24"}
Dec 05 11:30:34 crc kubenswrapper[4728]: I1205 11:30:34.116625 4728 scope.go:117] "RemoveContainer" containerID="a0fa70a421a579bea670c41362eb3ff3a96d194d5e8819e4542494f4411026f6"
Dec 05 11:30:34 crc kubenswrapper[4728]: I1205 11:30:34.116976 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Dec 05 11:30:34 crc kubenswrapper[4728]: I1205 11:30:34.127204 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3771ab3-bbe0-4af1-af70-01155203ec37-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a3771ab3-bbe0-4af1-af70-01155203ec37" (UID: "a3771ab3-bbe0-4af1-af70-01155203ec37"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:30:34 crc kubenswrapper[4728]: I1205 11:30:34.183656 4728 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3771ab3-bbe0-4af1-af70-01155203ec37-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 11:30:34 crc kubenswrapper[4728]: I1205 11:30:34.183703 4728 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3771ab3-bbe0-4af1-af70-01155203ec37-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 11:30:34 crc kubenswrapper[4728]: I1205 11:30:34.192266 4728 scope.go:117] "RemoveContainer" containerID="f55387ee5b3efd1c45350430b8c7419f48deb63ae51d6391c13024c94cfc4cf8"
Dec 05 11:30:34 crc kubenswrapper[4728]: I1205 11:30:34.443249 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"]
Dec 05 11:30:34 crc kubenswrapper[4728]: I1205 11:30:34.452412 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"]
Dec 05 11:30:34 crc kubenswrapper[4728]: I1205 11:30:34.479524 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"]
Dec 05 11:30:34 crc kubenswrapper[4728]: E1205 11:30:34.479966 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3771ab3-bbe0-4af1-af70-01155203ec37" containerName="cinder-api"
Dec 05 11:30:34 crc kubenswrapper[4728]: I1205 11:30:34.479985 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3771ab3-bbe0-4af1-af70-01155203ec37" containerName="cinder-api"
Dec 05 11:30:34 crc kubenswrapper[4728]: E1205 11:30:34.479998 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2657bde-2b78-49b2-bb33-a085f5a42024" containerName="neutron-api"
Dec 05 11:30:34 crc kubenswrapper[4728]: I1205 11:30:34.480006 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2657bde-2b78-49b2-bb33-a085f5a42024" containerName="neutron-api"
Dec 05 11:30:34 crc kubenswrapper[4728]: E1205 11:30:34.480023 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3771ab3-bbe0-4af1-af70-01155203ec37" containerName="cinder-api-log"
Dec 05 11:30:34 crc kubenswrapper[4728]: I1205 11:30:34.480030 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3771ab3-bbe0-4af1-af70-01155203ec37" containerName="cinder-api-log"
Dec 05 11:30:34 crc kubenswrapper[4728]: E1205 11:30:34.480044 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2657bde-2b78-49b2-bb33-a085f5a42024" containerName="neutron-httpd"
Dec 05 11:30:34 crc kubenswrapper[4728]: I1205 11:30:34.480049 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2657bde-2b78-49b2-bb33-a085f5a42024" containerName="neutron-httpd"
Dec 05 11:30:34 crc kubenswrapper[4728]: E1205 11:30:34.480061 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6cd833b3-79f5-41f4-8d37-07b81c016e23" containerName="init"
Dec 05 11:30:34 crc kubenswrapper[4728]: I1205 11:30:34.480067 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="6cd833b3-79f5-41f4-8d37-07b81c016e23" containerName="init"
Dec 05 11:30:34 crc kubenswrapper[4728]: E1205 11:30:34.480080 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6cd833b3-79f5-41f4-8d37-07b81c016e23" containerName="dnsmasq-dns"
Dec 05 11:30:34 crc kubenswrapper[4728]: I1205 11:30:34.480086 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="6cd833b3-79f5-41f4-8d37-07b81c016e23" containerName="dnsmasq-dns"
Dec 05 11:30:34 crc kubenswrapper[4728]: I1205 11:30:34.480263 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="a3771ab3-bbe0-4af1-af70-01155203ec37" containerName="cinder-api-log"
Dec 05 11:30:34 crc kubenswrapper[4728]: I1205 11:30:34.480274 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="b2657bde-2b78-49b2-bb33-a085f5a42024" containerName="neutron-api"
Dec 05 11:30:34 crc kubenswrapper[4728]: I1205 11:30:34.480285 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="6cd833b3-79f5-41f4-8d37-07b81c016e23" containerName="dnsmasq-dns"
Dec 05 11:30:34 crc kubenswrapper[4728]: I1205 11:30:34.480295 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="a3771ab3-bbe0-4af1-af70-01155203ec37" containerName="cinder-api"
Dec 05 11:30:34 crc kubenswrapper[4728]: I1205 11:30:34.480312 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="b2657bde-2b78-49b2-bb33-a085f5a42024" containerName="neutron-httpd"
Dec 05 11:30:34 crc kubenswrapper[4728]: I1205 11:30:34.481317 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Dec 05 11:30:34 crc kubenswrapper[4728]: I1205 11:30:34.495520 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data"
Dec 05 11:30:34 crc kubenswrapper[4728]: I1205 11:30:34.495916 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc"
Dec 05 11:30:34 crc kubenswrapper[4728]: I1205 11:30:34.496234 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc"
Dec 05 11:30:34 crc kubenswrapper[4728]: I1205 11:30:34.497637 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"]
Dec 05 11:30:34 crc kubenswrapper[4728]: I1205 11:30:34.610822 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cfc23d81-9123-49a5-b770-4f0b60e01d35-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"cfc23d81-9123-49a5-b770-4f0b60e01d35\") " pod="openstack/cinder-api-0"
Dec 05 11:30:34 crc kubenswrapper[4728]: I1205 11:30:34.610875 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cfc23d81-9123-49a5-b770-4f0b60e01d35-scripts\") pod \"cinder-api-0\" (UID: \"cfc23d81-9123-49a5-b770-4f0b60e01d35\") " pod="openstack/cinder-api-0"
Dec 05 11:30:34 crc kubenswrapper[4728]: I1205 11:30:34.610903 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/cfc23d81-9123-49a5-b770-4f0b60e01d35-etc-machine-id\") pod \"cinder-api-0\" (UID: \"cfc23d81-9123-49a5-b770-4f0b60e01d35\") " pod="openstack/cinder-api-0"
Dec 05 11:30:34 crc kubenswrapper[4728]: I1205 11:30:34.610960 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cfc23d81-9123-49a5-b770-4f0b60e01d35-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"cfc23d81-9123-49a5-b770-4f0b60e01d35\") " pod="openstack/cinder-api-0"
Dec 05 11:30:34 crc kubenswrapper[4728]: I1205 11:30:34.611011 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cfc23d81-9123-49a5-b770-4f0b60e01d35-config-data\") pod \"cinder-api-0\" (UID: \"cfc23d81-9123-49a5-b770-4f0b60e01d35\") " pod="openstack/cinder-api-0"
Dec 05 11:30:34 crc kubenswrapper[4728]: I1205 11:30:34.611036 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cfc23d81-9123-49a5-b770-4f0b60e01d35-config-data-custom\") pod \"cinder-api-0\" (UID: \"cfc23d81-9123-49a5-b770-4f0b60e01d35\") " pod="openstack/cinder-api-0"
Dec 05 11:30:34 crc kubenswrapper[4728]: I1205 11:30:34.611070 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cfc23d81-9123-49a5-b770-4f0b60e01d35-public-tls-certs\") pod \"cinder-api-0\" (UID: \"cfc23d81-9123-49a5-b770-4f0b60e01d35\") " pod="openstack/cinder-api-0"
Dec 05 11:30:34 crc kubenswrapper[4728]: I1205 11:30:34.611090 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9xgpz\" (UniqueName:
\"kubernetes.io/projected/cfc23d81-9123-49a5-b770-4f0b60e01d35-kube-api-access-9xgpz\") pod \"cinder-api-0\" (UID: \"cfc23d81-9123-49a5-b770-4f0b60e01d35\") " pod="openstack/cinder-api-0" Dec 05 11:30:34 crc kubenswrapper[4728]: I1205 11:30:34.611149 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cfc23d81-9123-49a5-b770-4f0b60e01d35-logs\") pod \"cinder-api-0\" (UID: \"cfc23d81-9123-49a5-b770-4f0b60e01d35\") " pod="openstack/cinder-api-0" Dec 05 11:30:34 crc kubenswrapper[4728]: I1205 11:30:34.712858 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cfc23d81-9123-49a5-b770-4f0b60e01d35-logs\") pod \"cinder-api-0\" (UID: \"cfc23d81-9123-49a5-b770-4f0b60e01d35\") " pod="openstack/cinder-api-0" Dec 05 11:30:34 crc kubenswrapper[4728]: I1205 11:30:34.712935 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cfc23d81-9123-49a5-b770-4f0b60e01d35-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"cfc23d81-9123-49a5-b770-4f0b60e01d35\") " pod="openstack/cinder-api-0" Dec 05 11:30:34 crc kubenswrapper[4728]: I1205 11:30:34.712969 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cfc23d81-9123-49a5-b770-4f0b60e01d35-scripts\") pod \"cinder-api-0\" (UID: \"cfc23d81-9123-49a5-b770-4f0b60e01d35\") " pod="openstack/cinder-api-0" Dec 05 11:30:34 crc kubenswrapper[4728]: I1205 11:30:34.712998 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/cfc23d81-9123-49a5-b770-4f0b60e01d35-etc-machine-id\") pod \"cinder-api-0\" (UID: \"cfc23d81-9123-49a5-b770-4f0b60e01d35\") " pod="openstack/cinder-api-0" Dec 05 11:30:34 crc kubenswrapper[4728]: I1205 11:30:34.713043 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cfc23d81-9123-49a5-b770-4f0b60e01d35-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"cfc23d81-9123-49a5-b770-4f0b60e01d35\") " pod="openstack/cinder-api-0" Dec 05 11:30:34 crc kubenswrapper[4728]: I1205 11:30:34.713088 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cfc23d81-9123-49a5-b770-4f0b60e01d35-config-data\") pod \"cinder-api-0\" (UID: \"cfc23d81-9123-49a5-b770-4f0b60e01d35\") " pod="openstack/cinder-api-0" Dec 05 11:30:34 crc kubenswrapper[4728]: I1205 11:30:34.713106 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cfc23d81-9123-49a5-b770-4f0b60e01d35-config-data-custom\") pod \"cinder-api-0\" (UID: \"cfc23d81-9123-49a5-b770-4f0b60e01d35\") " pod="openstack/cinder-api-0" Dec 05 11:30:34 crc kubenswrapper[4728]: I1205 11:30:34.713136 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9xgpz\" (UniqueName: \"kubernetes.io/projected/cfc23d81-9123-49a5-b770-4f0b60e01d35-kube-api-access-9xgpz\") pod \"cinder-api-0\" (UID: \"cfc23d81-9123-49a5-b770-4f0b60e01d35\") " pod="openstack/cinder-api-0" Dec 05 11:30:34 crc kubenswrapper[4728]: I1205 11:30:34.713156 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" 
(UniqueName: \"kubernetes.io/secret/cfc23d81-9123-49a5-b770-4f0b60e01d35-public-tls-certs\") pod \"cinder-api-0\" (UID: \"cfc23d81-9123-49a5-b770-4f0b60e01d35\") " pod="openstack/cinder-api-0" Dec 05 11:30:34 crc kubenswrapper[4728]: I1205 11:30:34.713275 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/cfc23d81-9123-49a5-b770-4f0b60e01d35-etc-machine-id\") pod \"cinder-api-0\" (UID: \"cfc23d81-9123-49a5-b770-4f0b60e01d35\") " pod="openstack/cinder-api-0" Dec 05 11:30:34 crc kubenswrapper[4728]: I1205 11:30:34.713347 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cfc23d81-9123-49a5-b770-4f0b60e01d35-logs\") pod \"cinder-api-0\" (UID: \"cfc23d81-9123-49a5-b770-4f0b60e01d35\") " pod="openstack/cinder-api-0" Dec 05 11:30:34 crc kubenswrapper[4728]: I1205 11:30:34.722925 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cfc23d81-9123-49a5-b770-4f0b60e01d35-config-data-custom\") pod \"cinder-api-0\" (UID: \"cfc23d81-9123-49a5-b770-4f0b60e01d35\") " pod="openstack/cinder-api-0" Dec 05 11:30:34 crc kubenswrapper[4728]: I1205 11:30:34.723405 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cfc23d81-9123-49a5-b770-4f0b60e01d35-public-tls-certs\") pod \"cinder-api-0\" (UID: \"cfc23d81-9123-49a5-b770-4f0b60e01d35\") " pod="openstack/cinder-api-0" Dec 05 11:30:34 crc kubenswrapper[4728]: I1205 11:30:34.724137 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cfc23d81-9123-49a5-b770-4f0b60e01d35-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"cfc23d81-9123-49a5-b770-4f0b60e01d35\") " pod="openstack/cinder-api-0" Dec 05 11:30:34 crc kubenswrapper[4728]: I1205 11:30:34.725389 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cfc23d81-9123-49a5-b770-4f0b60e01d35-config-data\") pod \"cinder-api-0\" (UID: \"cfc23d81-9123-49a5-b770-4f0b60e01d35\") " pod="openstack/cinder-api-0" Dec 05 11:30:34 crc kubenswrapper[4728]: I1205 11:30:34.725832 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cfc23d81-9123-49a5-b770-4f0b60e01d35-scripts\") pod \"cinder-api-0\" (UID: \"cfc23d81-9123-49a5-b770-4f0b60e01d35\") " pod="openstack/cinder-api-0" Dec 05 11:30:34 crc kubenswrapper[4728]: I1205 11:30:34.726505 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cfc23d81-9123-49a5-b770-4f0b60e01d35-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"cfc23d81-9123-49a5-b770-4f0b60e01d35\") " pod="openstack/cinder-api-0" Dec 05 11:30:34 crc kubenswrapper[4728]: I1205 11:30:34.742640 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9xgpz\" (UniqueName: \"kubernetes.io/projected/cfc23d81-9123-49a5-b770-4f0b60e01d35-kube-api-access-9xgpz\") pod \"cinder-api-0\" (UID: \"cfc23d81-9123-49a5-b770-4f0b60e01d35\") " pod="openstack/cinder-api-0" Dec 05 11:30:34 crc kubenswrapper[4728]: I1205 11:30:34.811856 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 05 11:30:35 crc kubenswrapper[4728]: I1205 11:30:35.128689 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f","Type":"ContainerStarted","Data":"7f41e6e21f1e69685f975c1ac3340149f69d19beace7a8567c1beb0451d047c7"} Dec 05 11:30:35 crc kubenswrapper[4728]: I1205 11:30:35.680213 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-554ccc7b5b-l2c6v" Dec 05 11:30:35 crc kubenswrapper[4728]: I1205 11:30:35.865638 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 05 11:30:35 crc kubenswrapper[4728]: I1205 11:30:35.886934 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 05 11:30:35 crc kubenswrapper[4728]: W1205 11:30:35.898900 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcfc23d81_9123_49a5_b770_4f0b60e01d35.slice/crio-b8c98e9c958eaf1119c18db52f2b513ad104963e224169a2d724dca9562c2812 WatchSource:0}: Error finding container b8c98e9c958eaf1119c18db52f2b513ad104963e224169a2d724dca9562c2812: Status 404 returned error can't find the container with id b8c98e9c958eaf1119c18db52f2b513ad104963e224169a2d724dca9562c2812 Dec 05 11:30:36 crc kubenswrapper[4728]: I1205 11:30:36.053918 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5c9776ccc5-xxm6x" Dec 05 11:30:36 crc kubenswrapper[4728]: I1205 11:30:36.122151 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-6pvrj"] Dec 05 11:30:36 crc kubenswrapper[4728]: I1205 11:30:36.122596 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-55f844cf75-6pvrj" podUID="289975c4-8dcc-4318-8c23-5acee4caa8bd" containerName="dnsmasq-dns" containerID="cri-o://ac327e3aa8bf4264f480f4a13e6627a466c4bed26f630fcbd35543f97f9172d0" gracePeriod=10 Dec 05 11:30:36 crc kubenswrapper[4728]: I1205 11:30:36.153529 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"cfc23d81-9123-49a5-b770-4f0b60e01d35","Type":"ContainerStarted","Data":"b8c98e9c958eaf1119c18db52f2b513ad104963e224169a2d724dca9562c2812"} Dec 05 11:30:36 crc kubenswrapper[4728]: I1205 11:30:36.396295 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a3771ab3-bbe0-4af1-af70-01155203ec37" path="/var/lib/kubelet/pods/a3771ab3-bbe0-4af1-af70-01155203ec37/volumes" Dec 05 11:30:36 crc kubenswrapper[4728]: I1205 11:30:36.490144 4728 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/cinder-scheduler-0" podUID="44b0cb2c-149f-4a9d-a494-cb9542391b4f" containerName="cinder-scheduler" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 11:30:36 crc kubenswrapper[4728]: I1205 11:30:36.519459 4728 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/cinder-volume-volume1-0" podUID="3f719999-317e-4c1f-9d0d-53c9d1277e14" containerName="cinder-volume" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 11:30:36 crc kubenswrapper[4728]: I1205 11:30:36.918644 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-7755888bd8-shzsv" Dec 05 11:30:36 crc kubenswrapper[4728]: I1205 11:30:36.991140 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-55f844cf75-6pvrj" Dec 05 11:30:37 crc kubenswrapper[4728]: I1205 11:30:37.005519 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-5d7bdb6c68-cfbgd"] Dec 05 11:30:37 crc kubenswrapper[4728]: I1205 11:30:37.005751 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-5d7bdb6c68-cfbgd" podUID="7c54bdd7-0427-403e-a33d-2d52ec56a7fc" containerName="horizon-log" containerID="cri-o://854c78ab2e7ce12e906b1b302c9941632db6e57660fe1ac6ea8724721bd7b054" gracePeriod=30 Dec 05 11:30:37 crc kubenswrapper[4728]: I1205 11:30:37.005904 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-5d7bdb6c68-cfbgd" podUID="7c54bdd7-0427-403e-a33d-2d52ec56a7fc" containerName="horizon" containerID="cri-o://bf564e7b42883be7caec87b4b0532d5f01da418814c2ceb4858093c4bba6b4a8" gracePeriod=30 Dec 05 11:30:37 crc kubenswrapper[4728]: I1205 11:30:37.012707 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/289975c4-8dcc-4318-8c23-5acee4caa8bd-dns-svc\") pod \"289975c4-8dcc-4318-8c23-5acee4caa8bd\" (UID: \"289975c4-8dcc-4318-8c23-5acee4caa8bd\") " Dec 05 11:30:37 crc kubenswrapper[4728]: I1205 11:30:37.012785 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/289975c4-8dcc-4318-8c23-5acee4caa8bd-ovsdbserver-sb\") pod \"289975c4-8dcc-4318-8c23-5acee4caa8bd\" (UID: \"289975c4-8dcc-4318-8c23-5acee4caa8bd\") " Dec 05 11:30:37 crc kubenswrapper[4728]: I1205 11:30:37.012823 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/289975c4-8dcc-4318-8c23-5acee4caa8bd-config\") pod \"289975c4-8dcc-4318-8c23-5acee4caa8bd\" (UID: \"289975c4-8dcc-4318-8c23-5acee4caa8bd\") " Dec 05 11:30:37 crc kubenswrapper[4728]: I1205 11:30:37.012850 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/289975c4-8dcc-4318-8c23-5acee4caa8bd-ovsdbserver-nb\") pod \"289975c4-8dcc-4318-8c23-5acee4caa8bd\" (UID: \"289975c4-8dcc-4318-8c23-5acee4caa8bd\") " Dec 05 11:30:37 crc kubenswrapper[4728]: I1205 11:30:37.012910 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/289975c4-8dcc-4318-8c23-5acee4caa8bd-dns-swift-storage-0\") pod \"289975c4-8dcc-4318-8c23-5acee4caa8bd\" (UID: \"289975c4-8dcc-4318-8c23-5acee4caa8bd\") " Dec 05 11:30:37 crc kubenswrapper[4728]: I1205 11:30:37.012935 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wkfpk\" (UniqueName: \"kubernetes.io/projected/289975c4-8dcc-4318-8c23-5acee4caa8bd-kube-api-access-wkfpk\") pod \"289975c4-8dcc-4318-8c23-5acee4caa8bd\" (UID: \"289975c4-8dcc-4318-8c23-5acee4caa8bd\") " Dec 05 11:30:37 crc kubenswrapper[4728]: I1205 11:30:37.023393 4728 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-5d7bdb6c68-cfbgd" podUID="7c54bdd7-0427-403e-a33d-2d52ec56a7fc" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.151:8443/dashboard/auth/login/?next=/dashboard/\": EOF" Dec 05 11:30:37 crc kubenswrapper[4728]: I1205 11:30:37.076363 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/289975c4-8dcc-4318-8c23-5acee4caa8bd-kube-api-access-wkfpk" (OuterVolumeSpecName: "kube-api-access-wkfpk") pod "289975c4-8dcc-4318-8c23-5acee4caa8bd" (UID: "289975c4-8dcc-4318-8c23-5acee4caa8bd"). InnerVolumeSpecName "kube-api-access-wkfpk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:30:37 crc kubenswrapper[4728]: I1205 11:30:37.114461 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wkfpk\" (UniqueName: \"kubernetes.io/projected/289975c4-8dcc-4318-8c23-5acee4caa8bd-kube-api-access-wkfpk\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:37 crc kubenswrapper[4728]: I1205 11:30:37.199333 4728 generic.go:334] "Generic (PLEG): container finished" podID="289975c4-8dcc-4318-8c23-5acee4caa8bd" containerID="ac327e3aa8bf4264f480f4a13e6627a466c4bed26f630fcbd35543f97f9172d0" exitCode=0 Dec 05 11:30:37 crc kubenswrapper[4728]: I1205 11:30:37.199386 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f844cf75-6pvrj" event={"ID":"289975c4-8dcc-4318-8c23-5acee4caa8bd","Type":"ContainerDied","Data":"ac327e3aa8bf4264f480f4a13e6627a466c4bed26f630fcbd35543f97f9172d0"} Dec 05 11:30:37 crc kubenswrapper[4728]: I1205 11:30:37.199421 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f844cf75-6pvrj" event={"ID":"289975c4-8dcc-4318-8c23-5acee4caa8bd","Type":"ContainerDied","Data":"a66a9b305ca0855f3cf3c75cb0bd28bd4cc70385bd6da3db9ebb3b7d7921af99"} Dec 05 11:30:37 crc kubenswrapper[4728]: I1205 11:30:37.199444 4728 scope.go:117] "RemoveContainer" containerID="ac327e3aa8bf4264f480f4a13e6627a466c4bed26f630fcbd35543f97f9172d0" Dec 05 11:30:37 crc kubenswrapper[4728]: I1205 11:30:37.200029 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55f844cf75-6pvrj" Dec 05 11:30:37 crc kubenswrapper[4728]: I1205 11:30:37.252241 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-backup-0" Dec 05 11:30:37 crc kubenswrapper[4728]: I1205 11:30:37.314630 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-backup-0"] Dec 05 11:30:37 crc kubenswrapper[4728]: I1205 11:30:37.353222 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/289975c4-8dcc-4318-8c23-5acee4caa8bd-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "289975c4-8dcc-4318-8c23-5acee4caa8bd" (UID: "289975c4-8dcc-4318-8c23-5acee4caa8bd"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:30:37 crc kubenswrapper[4728]: I1205 11:30:37.358664 4728 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/289975c4-8dcc-4318-8c23-5acee4caa8bd-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:37 crc kubenswrapper[4728]: I1205 11:30:37.370678 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/289975c4-8dcc-4318-8c23-5acee4caa8bd-config" (OuterVolumeSpecName: "config") pod "289975c4-8dcc-4318-8c23-5acee4caa8bd" (UID: "289975c4-8dcc-4318-8c23-5acee4caa8bd"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:30:37 crc kubenswrapper[4728]: I1205 11:30:37.370766 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/289975c4-8dcc-4318-8c23-5acee4caa8bd-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "289975c4-8dcc-4318-8c23-5acee4caa8bd" (UID: "289975c4-8dcc-4318-8c23-5acee4caa8bd"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:30:37 crc kubenswrapper[4728]: I1205 11:30:37.408553 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/289975c4-8dcc-4318-8c23-5acee4caa8bd-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "289975c4-8dcc-4318-8c23-5acee4caa8bd" (UID: "289975c4-8dcc-4318-8c23-5acee4caa8bd"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:30:37 crc kubenswrapper[4728]: I1205 11:30:37.450745 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/289975c4-8dcc-4318-8c23-5acee4caa8bd-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "289975c4-8dcc-4318-8c23-5acee4caa8bd" (UID: "289975c4-8dcc-4318-8c23-5acee4caa8bd"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:30:37 crc kubenswrapper[4728]: I1205 11:30:37.478291 4728 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/289975c4-8dcc-4318-8c23-5acee4caa8bd-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:37 crc kubenswrapper[4728]: I1205 11:30:37.478568 4728 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/289975c4-8dcc-4318-8c23-5acee4caa8bd-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:37 crc kubenswrapper[4728]: I1205 11:30:37.478578 4728 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/289975c4-8dcc-4318-8c23-5acee4caa8bd-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:37 crc kubenswrapper[4728]: I1205 11:30:37.478586 4728 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/289975c4-8dcc-4318-8c23-5acee4caa8bd-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:37 crc kubenswrapper[4728]: I1205 11:30:37.526986 4728 scope.go:117] "RemoveContainer" containerID="6d2e6eb8ad80c42266a730bbedae37dc194334970df25a9af8f3ad9bc9bc3eb1" Dec 05 11:30:37 crc kubenswrapper[4728]: I1205 11:30:37.553837 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-6pvrj"] Dec 05 11:30:37 crc kubenswrapper[4728]: I1205 11:30:37.565938 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-6pvrj"] Dec 05 11:30:37 crc kubenswrapper[4728]: I1205 11:30:37.605610 4728 scope.go:117] "RemoveContainer" containerID="ac327e3aa8bf4264f480f4a13e6627a466c4bed26f630fcbd35543f97f9172d0" Dec 05 11:30:37 crc kubenswrapper[4728]: E1205 11:30:37.605983 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ac327e3aa8bf4264f480f4a13e6627a466c4bed26f630fcbd35543f97f9172d0\": container with ID starting with ac327e3aa8bf4264f480f4a13e6627a466c4bed26f630fcbd35543f97f9172d0 not found: ID does not exist" 
containerID="ac327e3aa8bf4264f480f4a13e6627a466c4bed26f630fcbd35543f97f9172d0" Dec 05 11:30:37 crc kubenswrapper[4728]: I1205 11:30:37.606013 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ac327e3aa8bf4264f480f4a13e6627a466c4bed26f630fcbd35543f97f9172d0"} err="failed to get container status \"ac327e3aa8bf4264f480f4a13e6627a466c4bed26f630fcbd35543f97f9172d0\": rpc error: code = NotFound desc = could not find container \"ac327e3aa8bf4264f480f4a13e6627a466c4bed26f630fcbd35543f97f9172d0\": container with ID starting with ac327e3aa8bf4264f480f4a13e6627a466c4bed26f630fcbd35543f97f9172d0 not found: ID does not exist" Dec 05 11:30:37 crc kubenswrapper[4728]: I1205 11:30:37.606033 4728 scope.go:117] "RemoveContainer" containerID="6d2e6eb8ad80c42266a730bbedae37dc194334970df25a9af8f3ad9bc9bc3eb1" Dec 05 11:30:37 crc kubenswrapper[4728]: E1205 11:30:37.606457 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6d2e6eb8ad80c42266a730bbedae37dc194334970df25a9af8f3ad9bc9bc3eb1\": container with ID starting with 6d2e6eb8ad80c42266a730bbedae37dc194334970df25a9af8f3ad9bc9bc3eb1 not found: ID does not exist" containerID="6d2e6eb8ad80c42266a730bbedae37dc194334970df25a9af8f3ad9bc9bc3eb1" Dec 05 11:30:37 crc kubenswrapper[4728]: I1205 11:30:37.606480 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d2e6eb8ad80c42266a730bbedae37dc194334970df25a9af8f3ad9bc9bc3eb1"} err="failed to get container status \"6d2e6eb8ad80c42266a730bbedae37dc194334970df25a9af8f3ad9bc9bc3eb1\": rpc error: code = NotFound desc = could not find container \"6d2e6eb8ad80c42266a730bbedae37dc194334970df25a9af8f3ad9bc9bc3eb1\": container with ID starting with 6d2e6eb8ad80c42266a730bbedae37dc194334970df25a9af8f3ad9bc9bc3eb1 not found: ID does not exist" Dec 05 11:30:38 crc kubenswrapper[4728]: I1205 11:30:38.110839 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-798767f9fd-kfrfz" Dec 05 11:30:38 crc kubenswrapper[4728]: I1205 11:30:38.216943 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"cfc23d81-9123-49a5-b770-4f0b60e01d35","Type":"ContainerStarted","Data":"245ac84f899cb12adca206a2fdbb53979a3350bb5f912bf8b8dfdf9c4b282375"} Dec 05 11:30:38 crc kubenswrapper[4728]: I1205 11:30:38.221292 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-backup-0" podUID="36b86337-512e-47a3-80ec-ea9c3b25c3f6" containerName="cinder-backup" containerID="cri-o://6f6a28abdca56c20264691e4ade4b122fdbe08a6d995e8a70b343e5f887a512c" gracePeriod=30 Dec 05 11:30:38 crc kubenswrapper[4728]: I1205 11:30:38.222467 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f","Type":"ContainerStarted","Data":"e47102520c329e24dd4df17569516824f4fdfaa6fe2561775d2ca6e43705cd57"} Dec 05 11:30:38 crc kubenswrapper[4728]: I1205 11:30:38.222526 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-backup-0" podUID="36b86337-512e-47a3-80ec-ea9c3b25c3f6" containerName="probe" containerID="cri-o://ab74e70e8fd0007fdf95b3b591eb20574d53752908e920d12a501578810735db" gracePeriod=30 Dec 05 11:30:38 crc kubenswrapper[4728]: I1205 11:30:38.222663 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 11:30:38 
crc kubenswrapper[4728]: I1205 11:30:38.249001 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.215828378 podStartE2EDuration="11.248982585s" podCreationTimestamp="2025-12-05 11:30:27 +0000 UTC" firstStartedPulling="2025-12-05 11:30:28.716605924 +0000 UTC m=+1362.858728617" lastFinishedPulling="2025-12-05 11:30:36.749760131 +0000 UTC m=+1370.891882824" observedRunningTime="2025-12-05 11:30:38.247479204 +0000 UTC m=+1372.389601907" watchObservedRunningTime="2025-12-05 11:30:38.248982585 +0000 UTC m=+1372.391105278" Dec 05 11:30:38 crc kubenswrapper[4728]: I1205 11:30:38.362058 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="289975c4-8dcc-4318-8c23-5acee4caa8bd" path="/var/lib/kubelet/pods/289975c4-8dcc-4318-8c23-5acee4caa8bd/volumes" Dec 05 11:30:38 crc kubenswrapper[4728]: I1205 11:30:38.393455 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-798767f9fd-kfrfz" Dec 05 11:30:38 crc kubenswrapper[4728]: I1205 11:30:38.490492 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-6d6466fdb6-7fhbh"] Dec 05 11:30:38 crc kubenswrapper[4728]: I1205 11:30:38.490985 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-6d6466fdb6-7fhbh" podUID="f7faef5d-1c9d-4820-a97e-22486f06f9ce" containerName="barbican-api-log" containerID="cri-o://a17cdf0d6d59bb47b7c1c9d368a73bee178275f3c086c669fa938b563c01a115" gracePeriod=30 Dec 05 11:30:38 crc kubenswrapper[4728]: I1205 11:30:38.491448 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-6d6466fdb6-7fhbh" podUID="f7faef5d-1c9d-4820-a97e-22486f06f9ce" containerName="barbican-api" containerID="cri-o://652f04c78f6b6b46fd9dc695423eec317fb5cf018fd85fca0d78016a62b747f5" gracePeriod=30 Dec 05 11:30:38 crc kubenswrapper[4728]: I1205 11:30:38.992335 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-6fbd7fcb8c-kr5v8" Dec 05 11:30:39 crc kubenswrapper[4728]: I1205 11:30:39.242404 4728 generic.go:334] "Generic (PLEG): container finished" podID="36b86337-512e-47a3-80ec-ea9c3b25c3f6" containerID="ab74e70e8fd0007fdf95b3b591eb20574d53752908e920d12a501578810735db" exitCode=0 Dec 05 11:30:39 crc kubenswrapper[4728]: I1205 11:30:39.242512 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"36b86337-512e-47a3-80ec-ea9c3b25c3f6","Type":"ContainerDied","Data":"ab74e70e8fd0007fdf95b3b591eb20574d53752908e920d12a501578810735db"} Dec 05 11:30:39 crc kubenswrapper[4728]: I1205 11:30:39.264520 4728 generic.go:334] "Generic (PLEG): container finished" podID="f7faef5d-1c9d-4820-a97e-22486f06f9ce" containerID="a17cdf0d6d59bb47b7c1c9d368a73bee178275f3c086c669fa938b563c01a115" exitCode=143 Dec 05 11:30:39 crc kubenswrapper[4728]: I1205 11:30:39.264621 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6d6466fdb6-7fhbh" event={"ID":"f7faef5d-1c9d-4820-a97e-22486f06f9ce","Type":"ContainerDied","Data":"a17cdf0d6d59bb47b7c1c9d368a73bee178275f3c086c669fa938b563c01a115"} Dec 05 11:30:39 crc kubenswrapper[4728]: I1205 11:30:39.286813 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"cfc23d81-9123-49a5-b770-4f0b60e01d35","Type":"ContainerStarted","Data":"9242f948162cca2d248a1109cffedca889f00a1643ace4a469ef13d60ed4793f"} Dec 05 11:30:39 crc 
kubenswrapper[4728]: I1205 11:30:39.286870 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 05 11:30:39 crc kubenswrapper[4728]: I1205 11:30:39.307663 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=5.30764016 podStartE2EDuration="5.30764016s" podCreationTimestamp="2025-12-05 11:30:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:30:39.303058345 +0000 UTC m=+1373.445181038" watchObservedRunningTime="2025-12-05 11:30:39.30764016 +0000 UTC m=+1373.449762853" Dec 05 11:30:40 crc kubenswrapper[4728]: I1205 11:30:40.303008 4728 generic.go:334] "Generic (PLEG): container finished" podID="36b86337-512e-47a3-80ec-ea9c3b25c3f6" containerID="6f6a28abdca56c20264691e4ade4b122fdbe08a6d995e8a70b343e5f887a512c" exitCode=0 Dec 05 11:30:40 crc kubenswrapper[4728]: I1205 11:30:40.306452 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"36b86337-512e-47a3-80ec-ea9c3b25c3f6","Type":"ContainerDied","Data":"6f6a28abdca56c20264691e4ade4b122fdbe08a6d995e8a70b343e5f887a512c"} Dec 05 11:30:40 crc kubenswrapper[4728]: I1205 11:30:40.306585 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"36b86337-512e-47a3-80ec-ea9c3b25c3f6","Type":"ContainerDied","Data":"73367f1b31d0cc3924e417e740fd3f3f7967a2b493c4353a2bf41f9d8b32fcb1"} Dec 05 11:30:40 crc kubenswrapper[4728]: I1205 11:30:40.306706 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="73367f1b31d0cc3924e417e740fd3f3f7967a2b493c4353a2bf41f9d8b32fcb1" Dec 05 11:30:40 crc kubenswrapper[4728]: I1205 11:30:40.373525 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-backup-0" Dec 05 11:30:40 crc kubenswrapper[4728]: I1205 11:30:40.445552 4728 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-5d7bdb6c68-cfbgd" podUID="7c54bdd7-0427-403e-a33d-2d52ec56a7fc" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.151:8443/dashboard/auth/login/?next=/dashboard/\": read tcp 10.217.0.2:44984->10.217.0.151:8443: read: connection reset by peer" Dec 05 11:30:40 crc kubenswrapper[4728]: I1205 11:30:40.448930 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-lib-modules\") pod \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " Dec 05 11:30:40 crc kubenswrapper[4728]: I1205 11:30:40.449128 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-etc-nvme\") pod \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " Dec 05 11:30:40 crc kubenswrapper[4728]: I1205 11:30:40.449079 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-lib-modules" (OuterVolumeSpecName: "lib-modules") pod "36b86337-512e-47a3-80ec-ea9c3b25c3f6" (UID: "36b86337-512e-47a3-80ec-ea9c3b25c3f6"). InnerVolumeSpecName "lib-modules". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:30:40 crc kubenswrapper[4728]: I1205 11:30:40.449201 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-dev\") pod \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " Dec 05 11:30:40 crc kubenswrapper[4728]: I1205 11:30:40.449263 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-etc-nvme" (OuterVolumeSpecName: "etc-nvme") pod "36b86337-512e-47a3-80ec-ea9c3b25c3f6" (UID: "36b86337-512e-47a3-80ec-ea9c3b25c3f6"). InnerVolumeSpecName "etc-nvme". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:30:40 crc kubenswrapper[4728]: I1205 11:30:40.449323 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-var-lib-cinder\") pod \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " Dec 05 11:30:40 crc kubenswrapper[4728]: I1205 11:30:40.449428 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/36b86337-512e-47a3-80ec-ea9c3b25c3f6-ceph\") pod \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " Dec 05 11:30:40 crc kubenswrapper[4728]: I1205 11:30:40.449467 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-sys\") pod \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " Dec 05 11:30:40 crc kubenswrapper[4728]: I1205 11:30:40.449489 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-etc-machine-id\") pod \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " Dec 05 11:30:40 crc kubenswrapper[4728]: I1205 11:30:40.449511 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36b86337-512e-47a3-80ec-ea9c3b25c3f6-combined-ca-bundle\") pod \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " Dec 05 11:30:40 crc kubenswrapper[4728]: I1205 11:30:40.449575 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36b86337-512e-47a3-80ec-ea9c3b25c3f6-config-data\") pod \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " Dec 05 11:30:40 crc kubenswrapper[4728]: I1205 11:30:40.449362 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-var-lib-cinder" (OuterVolumeSpecName: "var-lib-cinder") pod "36b86337-512e-47a3-80ec-ea9c3b25c3f6" (UID: "36b86337-512e-47a3-80ec-ea9c3b25c3f6"). InnerVolumeSpecName "var-lib-cinder". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:30:40 crc kubenswrapper[4728]: I1205 11:30:40.449402 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-dev" (OuterVolumeSpecName: "dev") pod "36b86337-512e-47a3-80ec-ea9c3b25c3f6" (UID: "36b86337-512e-47a3-80ec-ea9c3b25c3f6"). InnerVolumeSpecName "dev". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:30:40 crc kubenswrapper[4728]: I1205 11:30:40.449617 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "36b86337-512e-47a3-80ec-ea9c3b25c3f6" (UID: "36b86337-512e-47a3-80ec-ea9c3b25c3f6"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:30:40 crc kubenswrapper[4728]: I1205 11:30:40.450290 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36b86337-512e-47a3-80ec-ea9c3b25c3f6-scripts\") pod \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " Dec 05 11:30:40 crc kubenswrapper[4728]: I1205 11:30:40.450430 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-etc-iscsi\") pod \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " Dec 05 11:30:40 crc kubenswrapper[4728]: I1205 11:30:40.450491 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/36b86337-512e-47a3-80ec-ea9c3b25c3f6-config-data-custom\") pod \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " Dec 05 11:30:40 crc kubenswrapper[4728]: I1205 11:30:40.450522 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-run\") pod \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " Dec 05 11:30:40 crc kubenswrapper[4728]: I1205 11:30:40.450543 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-var-locks-brick\") pod \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " Dec 05 11:30:40 crc kubenswrapper[4728]: I1205 11:30:40.450570 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t4dm5\" (UniqueName: \"kubernetes.io/projected/36b86337-512e-47a3-80ec-ea9c3b25c3f6-kube-api-access-t4dm5\") pod \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " Dec 05 11:30:40 crc kubenswrapper[4728]: I1205 11:30:40.450631 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-var-locks-cinder\") pod \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\" (UID: \"36b86337-512e-47a3-80ec-ea9c3b25c3f6\") " Dec 05 11:30:40 crc kubenswrapper[4728]: I1205 11:30:40.451222 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-etc-iscsi" (OuterVolumeSpecName: "etc-iscsi") pod "36b86337-512e-47a3-80ec-ea9c3b25c3f6" (UID: "36b86337-512e-47a3-80ec-ea9c3b25c3f6"). InnerVolumeSpecName "etc-iscsi". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:30:40 crc kubenswrapper[4728]: I1205 11:30:40.451468 4728 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:40 crc kubenswrapper[4728]: I1205 11:30:40.451557 4728 reconciler_common.go:293] "Volume detached for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-etc-iscsi\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:40 crc kubenswrapper[4728]: I1205 11:30:40.451617 4728 reconciler_common.go:293] "Volume detached for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-lib-modules\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:40 crc kubenswrapper[4728]: I1205 11:30:40.451673 4728 reconciler_common.go:293] "Volume detached for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-etc-nvme\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:40 crc kubenswrapper[4728]: I1205 11:30:40.451735 4728 reconciler_common.go:293] "Volume detached for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-dev\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:40 crc kubenswrapper[4728]: I1205 11:30:40.451807 4728 reconciler_common.go:293] "Volume detached for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-var-lib-cinder\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:40 crc kubenswrapper[4728]: I1205 11:30:40.451914 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-run" (OuterVolumeSpecName: "run") pod "36b86337-512e-47a3-80ec-ea9c3b25c3f6" (UID: "36b86337-512e-47a3-80ec-ea9c3b25c3f6"). InnerVolumeSpecName "run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:30:40 crc kubenswrapper[4728]: I1205 11:30:40.451925 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-sys" (OuterVolumeSpecName: "sys") pod "36b86337-512e-47a3-80ec-ea9c3b25c3f6" (UID: "36b86337-512e-47a3-80ec-ea9c3b25c3f6"). InnerVolumeSpecName "sys". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:30:40 crc kubenswrapper[4728]: I1205 11:30:40.451940 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-var-locks-brick" (OuterVolumeSpecName: "var-locks-brick") pod "36b86337-512e-47a3-80ec-ea9c3b25c3f6" (UID: "36b86337-512e-47a3-80ec-ea9c3b25c3f6"). InnerVolumeSpecName "var-locks-brick". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:30:40 crc kubenswrapper[4728]: I1205 11:30:40.452350 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-var-locks-cinder" (OuterVolumeSpecName: "var-locks-cinder") pod "36b86337-512e-47a3-80ec-ea9c3b25c3f6" (UID: "36b86337-512e-47a3-80ec-ea9c3b25c3f6"). InnerVolumeSpecName "var-locks-cinder". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:30:40 crc kubenswrapper[4728]: I1205 11:30:40.479006 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36b86337-512e-47a3-80ec-ea9c3b25c3f6-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "36b86337-512e-47a3-80ec-ea9c3b25c3f6" (UID: "36b86337-512e-47a3-80ec-ea9c3b25c3f6"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:30:40 crc kubenswrapper[4728]: I1205 11:30:40.479158 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36b86337-512e-47a3-80ec-ea9c3b25c3f6-scripts" (OuterVolumeSpecName: "scripts") pod "36b86337-512e-47a3-80ec-ea9c3b25c3f6" (UID: "36b86337-512e-47a3-80ec-ea9c3b25c3f6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:30:40 crc kubenswrapper[4728]: I1205 11:30:40.479256 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/36b86337-512e-47a3-80ec-ea9c3b25c3f6-ceph" (OuterVolumeSpecName: "ceph") pod "36b86337-512e-47a3-80ec-ea9c3b25c3f6" (UID: "36b86337-512e-47a3-80ec-ea9c3b25c3f6"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:30:40 crc kubenswrapper[4728]: I1205 11:30:40.493307 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/36b86337-512e-47a3-80ec-ea9c3b25c3f6-kube-api-access-t4dm5" (OuterVolumeSpecName: "kube-api-access-t4dm5") pod "36b86337-512e-47a3-80ec-ea9c3b25c3f6" (UID: "36b86337-512e-47a3-80ec-ea9c3b25c3f6"). InnerVolumeSpecName "kube-api-access-t4dm5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:30:40 crc kubenswrapper[4728]: I1205 11:30:40.522941 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36b86337-512e-47a3-80ec-ea9c3b25c3f6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "36b86337-512e-47a3-80ec-ea9c3b25c3f6" (UID: "36b86337-512e-47a3-80ec-ea9c3b25c3f6"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:30:40 crc kubenswrapper[4728]: I1205 11:30:40.553955 4728 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36b86337-512e-47a3-80ec-ea9c3b25c3f6-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:40 crc kubenswrapper[4728]: I1205 11:30:40.553996 4728 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/36b86337-512e-47a3-80ec-ea9c3b25c3f6-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:40 crc kubenswrapper[4728]: I1205 11:30:40.554010 4728 reconciler_common.go:293] "Volume detached for volume \"run\" (UniqueName: \"kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-run\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:40 crc kubenswrapper[4728]: I1205 11:30:40.554019 4728 reconciler_common.go:293] "Volume detached for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-var-locks-brick\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:40 crc kubenswrapper[4728]: I1205 11:30:40.554028 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t4dm5\" (UniqueName: \"kubernetes.io/projected/36b86337-512e-47a3-80ec-ea9c3b25c3f6-kube-api-access-t4dm5\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:40 crc kubenswrapper[4728]: I1205 11:30:40.554036 4728 reconciler_common.go:293] "Volume detached for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-var-locks-cinder\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:40 crc kubenswrapper[4728]: I1205 11:30:40.554045 4728 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/36b86337-512e-47a3-80ec-ea9c3b25c3f6-ceph\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:40 crc kubenswrapper[4728]: I1205 11:30:40.554054 4728 reconciler_common.go:293] "Volume detached for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/36b86337-512e-47a3-80ec-ea9c3b25c3f6-sys\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:40 crc kubenswrapper[4728]: I1205 11:30:40.554065 4728 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36b86337-512e-47a3-80ec-ea9c3b25c3f6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:40 crc kubenswrapper[4728]: I1205 11:30:40.620474 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36b86337-512e-47a3-80ec-ea9c3b25c3f6-config-data" (OuterVolumeSpecName: "config-data") pod "36b86337-512e-47a3-80ec-ea9c3b25c3f6" (UID: "36b86337-512e-47a3-80ec-ea9c3b25c3f6"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:30:40 crc kubenswrapper[4728]: I1205 11:30:40.655832 4728 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36b86337-512e-47a3-80ec-ea9c3b25c3f6-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:40 crc kubenswrapper[4728]: I1205 11:30:40.874491 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 05 11:30:40 crc kubenswrapper[4728]: I1205 11:30:40.893850 4728 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-5d7bdb6c68-cfbgd" podUID="7c54bdd7-0427-403e-a33d-2d52ec56a7fc" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.151:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.151:8443: connect: connection refused" Dec 05 11:30:40 crc kubenswrapper[4728]: I1205 11:30:40.918404 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.160721 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.238890 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-volume-volume1-0"] Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.314997 4728 generic.go:334] "Generic (PLEG): container finished" podID="7c54bdd7-0427-403e-a33d-2d52ec56a7fc" containerID="bf564e7b42883be7caec87b4b0532d5f01da418814c2ceb4858093c4bba6b4a8" exitCode=0 Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.315077 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-backup-0" Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.318103 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5d7bdb6c68-cfbgd" event={"ID":"7c54bdd7-0427-403e-a33d-2d52ec56a7fc","Type":"ContainerDied","Data":"bf564e7b42883be7caec87b4b0532d5f01da418814c2ceb4858093c4bba6b4a8"} Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.318243 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-volume-volume1-0" podUID="3f719999-317e-4c1f-9d0d-53c9d1277e14" containerName="cinder-volume" containerID="cri-o://a8bf728812863067c249dc15bb0b97b2388d44dc7ff636fd27cfdc36b914e72c" gracePeriod=30 Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.318260 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="44b0cb2c-149f-4a9d-a494-cb9542391b4f" containerName="cinder-scheduler" containerID="cri-o://2e32be107c6c7a3cc13b2c62a76dd6b7745d7de8931f65993b5363cf07b691ee" gracePeriod=30 Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.318271 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-volume-volume1-0" podUID="3f719999-317e-4c1f-9d0d-53c9d1277e14" containerName="probe" containerID="cri-o://425007df221e112a565a0188f7d6bd3e60cdbec2e9a8b8d7180f5eb19aa8a152" gracePeriod=30 Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.318306 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="44b0cb2c-149f-4a9d-a494-cb9542391b4f" containerName="probe" containerID="cri-o://794b1f4b7e349796e5084b3141fc0d98aba909f56faa9534d2a57dfe8e876706" gracePeriod=30 Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 
11:30:41.358649 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-backup-0"] Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.370524 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-backup-0"] Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.389188 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-backup-0"] Dec 05 11:30:41 crc kubenswrapper[4728]: E1205 11:30:41.389780 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36b86337-512e-47a3-80ec-ea9c3b25c3f6" containerName="cinder-backup" Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.389896 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="36b86337-512e-47a3-80ec-ea9c3b25c3f6" containerName="cinder-backup" Dec 05 11:30:41 crc kubenswrapper[4728]: E1205 11:30:41.389970 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="289975c4-8dcc-4318-8c23-5acee4caa8bd" containerName="init" Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.390029 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="289975c4-8dcc-4318-8c23-5acee4caa8bd" containerName="init" Dec 05 11:30:41 crc kubenswrapper[4728]: E1205 11:30:41.390085 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36b86337-512e-47a3-80ec-ea9c3b25c3f6" containerName="probe" Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.390147 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="36b86337-512e-47a3-80ec-ea9c3b25c3f6" containerName="probe" Dec 05 11:30:41 crc kubenswrapper[4728]: E1205 11:30:41.390216 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="289975c4-8dcc-4318-8c23-5acee4caa8bd" containerName="dnsmasq-dns" Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.390265 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="289975c4-8dcc-4318-8c23-5acee4caa8bd" containerName="dnsmasq-dns" Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.390488 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="36b86337-512e-47a3-80ec-ea9c3b25c3f6" containerName="cinder-backup" Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.390552 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="36b86337-512e-47a3-80ec-ea9c3b25c3f6" containerName="probe" Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.390611 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="289975c4-8dcc-4318-8c23-5acee4caa8bd" containerName="dnsmasq-dns" Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.391615 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-backup-0" Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.398886 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-backup-config-data" Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.417280 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.477052 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/75b2a689-5a22-4496-af32-4e93e0b2f3df-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"75b2a689-5a22-4496-af32-4e93e0b2f3df\") " pod="openstack/cinder-backup-0" Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.477106 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/75b2a689-5a22-4496-af32-4e93e0b2f3df-lib-modules\") pod \"cinder-backup-0\" (UID: \"75b2a689-5a22-4496-af32-4e93e0b2f3df\") " pod="openstack/cinder-backup-0" Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.477145 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75b2a689-5a22-4496-af32-4e93e0b2f3df-config-data\") pod \"cinder-backup-0\" (UID: \"75b2a689-5a22-4496-af32-4e93e0b2f3df\") " pod="openstack/cinder-backup-0" Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.477189 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7zgt8\" (UniqueName: \"kubernetes.io/projected/75b2a689-5a22-4496-af32-4e93e0b2f3df-kube-api-access-7zgt8\") pod \"cinder-backup-0\" (UID: \"75b2a689-5a22-4496-af32-4e93e0b2f3df\") " pod="openstack/cinder-backup-0" Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.477245 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/75b2a689-5a22-4496-af32-4e93e0b2f3df-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"75b2a689-5a22-4496-af32-4e93e0b2f3df\") " pod="openstack/cinder-backup-0" Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.477291 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/75b2a689-5a22-4496-af32-4e93e0b2f3df-dev\") pod \"cinder-backup-0\" (UID: \"75b2a689-5a22-4496-af32-4e93e0b2f3df\") " pod="openstack/cinder-backup-0" Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.477306 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/75b2a689-5a22-4496-af32-4e93e0b2f3df-scripts\") pod \"cinder-backup-0\" (UID: \"75b2a689-5a22-4496-af32-4e93e0b2f3df\") " pod="openstack/cinder-backup-0" Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.477324 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/75b2a689-5a22-4496-af32-4e93e0b2f3df-config-data-custom\") pod \"cinder-backup-0\" (UID: \"75b2a689-5a22-4496-af32-4e93e0b2f3df\") " pod="openstack/cinder-backup-0" Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.477338 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/75b2a689-5a22-4496-af32-4e93e0b2f3df-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"75b2a689-5a22-4496-af32-4e93e0b2f3df\") " pod="openstack/cinder-backup-0" Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.477354 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/75b2a689-5a22-4496-af32-4e93e0b2f3df-run\") pod \"cinder-backup-0\" (UID: \"75b2a689-5a22-4496-af32-4e93e0b2f3df\") " pod="openstack/cinder-backup-0" Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.477374 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/75b2a689-5a22-4496-af32-4e93e0b2f3df-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"75b2a689-5a22-4496-af32-4e93e0b2f3df\") " pod="openstack/cinder-backup-0" Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.477388 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75b2a689-5a22-4496-af32-4e93e0b2f3df-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"75b2a689-5a22-4496-af32-4e93e0b2f3df\") " pod="openstack/cinder-backup-0" Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.477405 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/75b2a689-5a22-4496-af32-4e93e0b2f3df-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"75b2a689-5a22-4496-af32-4e93e0b2f3df\") " pod="openstack/cinder-backup-0" Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.477424 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/75b2a689-5a22-4496-af32-4e93e0b2f3df-ceph\") pod \"cinder-backup-0\" (UID: \"75b2a689-5a22-4496-af32-4e93e0b2f3df\") " pod="openstack/cinder-backup-0" Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.477483 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/75b2a689-5a22-4496-af32-4e93e0b2f3df-etc-nvme\") pod \"cinder-backup-0\" (UID: \"75b2a689-5a22-4496-af32-4e93e0b2f3df\") " pod="openstack/cinder-backup-0" Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.477498 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/75b2a689-5a22-4496-af32-4e93e0b2f3df-sys\") pod \"cinder-backup-0\" (UID: \"75b2a689-5a22-4496-af32-4e93e0b2f3df\") " pod="openstack/cinder-backup-0" Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.579053 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/75b2a689-5a22-4496-af32-4e93e0b2f3df-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"75b2a689-5a22-4496-af32-4e93e0b2f3df\") " pod="openstack/cinder-backup-0" Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.579130 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/75b2a689-5a22-4496-af32-4e93e0b2f3df-scripts\") pod \"cinder-backup-0\" (UID: \"75b2a689-5a22-4496-af32-4e93e0b2f3df\") " pod="openstack/cinder-backup-0" Dec 05 11:30:41 crc 
kubenswrapper[4728]: I1205 11:30:41.579146 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/75b2a689-5a22-4496-af32-4e93e0b2f3df-dev\") pod \"cinder-backup-0\" (UID: \"75b2a689-5a22-4496-af32-4e93e0b2f3df\") " pod="openstack/cinder-backup-0" Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.579166 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/75b2a689-5a22-4496-af32-4e93e0b2f3df-config-data-custom\") pod \"cinder-backup-0\" (UID: \"75b2a689-5a22-4496-af32-4e93e0b2f3df\") " pod="openstack/cinder-backup-0" Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.579185 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/75b2a689-5a22-4496-af32-4e93e0b2f3df-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"75b2a689-5a22-4496-af32-4e93e0b2f3df\") " pod="openstack/cinder-backup-0" Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.579206 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/75b2a689-5a22-4496-af32-4e93e0b2f3df-run\") pod \"cinder-backup-0\" (UID: \"75b2a689-5a22-4496-af32-4e93e0b2f3df\") " pod="openstack/cinder-backup-0" Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.579226 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/75b2a689-5a22-4496-af32-4e93e0b2f3df-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"75b2a689-5a22-4496-af32-4e93e0b2f3df\") " pod="openstack/cinder-backup-0" Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.579241 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75b2a689-5a22-4496-af32-4e93e0b2f3df-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"75b2a689-5a22-4496-af32-4e93e0b2f3df\") " pod="openstack/cinder-backup-0" Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.579258 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/75b2a689-5a22-4496-af32-4e93e0b2f3df-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"75b2a689-5a22-4496-af32-4e93e0b2f3df\") " pod="openstack/cinder-backup-0" Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.579276 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/75b2a689-5a22-4496-af32-4e93e0b2f3df-ceph\") pod \"cinder-backup-0\" (UID: \"75b2a689-5a22-4496-af32-4e93e0b2f3df\") " pod="openstack/cinder-backup-0" Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.579308 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/75b2a689-5a22-4496-af32-4e93e0b2f3df-etc-nvme\") pod \"cinder-backup-0\" (UID: \"75b2a689-5a22-4496-af32-4e93e0b2f3df\") " pod="openstack/cinder-backup-0" Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.579324 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/75b2a689-5a22-4496-af32-4e93e0b2f3df-sys\") pod \"cinder-backup-0\" (UID: \"75b2a689-5a22-4496-af32-4e93e0b2f3df\") " pod="openstack/cinder-backup-0" Dec 05 11:30:41 crc 
kubenswrapper[4728]: I1205 11:30:41.579351 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/75b2a689-5a22-4496-af32-4e93e0b2f3df-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"75b2a689-5a22-4496-af32-4e93e0b2f3df\") " pod="openstack/cinder-backup-0" Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.579377 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/75b2a689-5a22-4496-af32-4e93e0b2f3df-lib-modules\") pod \"cinder-backup-0\" (UID: \"75b2a689-5a22-4496-af32-4e93e0b2f3df\") " pod="openstack/cinder-backup-0" Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.579406 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75b2a689-5a22-4496-af32-4e93e0b2f3df-config-data\") pod \"cinder-backup-0\" (UID: \"75b2a689-5a22-4496-af32-4e93e0b2f3df\") " pod="openstack/cinder-backup-0" Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.579438 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7zgt8\" (UniqueName: \"kubernetes.io/projected/75b2a689-5a22-4496-af32-4e93e0b2f3df-kube-api-access-7zgt8\") pod \"cinder-backup-0\" (UID: \"75b2a689-5a22-4496-af32-4e93e0b2f3df\") " pod="openstack/cinder-backup-0" Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.580023 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/75b2a689-5a22-4496-af32-4e93e0b2f3df-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"75b2a689-5a22-4496-af32-4e93e0b2f3df\") " pod="openstack/cinder-backup-0" Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.580440 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/75b2a689-5a22-4496-af32-4e93e0b2f3df-sys\") pod \"cinder-backup-0\" (UID: \"75b2a689-5a22-4496-af32-4e93e0b2f3df\") " pod="openstack/cinder-backup-0" Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.580472 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/75b2a689-5a22-4496-af32-4e93e0b2f3df-lib-modules\") pod \"cinder-backup-0\" (UID: \"75b2a689-5a22-4496-af32-4e93e0b2f3df\") " pod="openstack/cinder-backup-0" Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.580446 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/75b2a689-5a22-4496-af32-4e93e0b2f3df-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"75b2a689-5a22-4496-af32-4e93e0b2f3df\") " pod="openstack/cinder-backup-0" Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.580480 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/75b2a689-5a22-4496-af32-4e93e0b2f3df-dev\") pod \"cinder-backup-0\" (UID: \"75b2a689-5a22-4496-af32-4e93e0b2f3df\") " pod="openstack/cinder-backup-0" Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.580440 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/75b2a689-5a22-4496-af32-4e93e0b2f3df-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"75b2a689-5a22-4496-af32-4e93e0b2f3df\") " pod="openstack/cinder-backup-0" Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 
11:30:41.580522 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/75b2a689-5a22-4496-af32-4e93e0b2f3df-etc-nvme\") pod \"cinder-backup-0\" (UID: \"75b2a689-5a22-4496-af32-4e93e0b2f3df\") " pod="openstack/cinder-backup-0" Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.580488 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/75b2a689-5a22-4496-af32-4e93e0b2f3df-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"75b2a689-5a22-4496-af32-4e93e0b2f3df\") " pod="openstack/cinder-backup-0" Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.580614 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/75b2a689-5a22-4496-af32-4e93e0b2f3df-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"75b2a689-5a22-4496-af32-4e93e0b2f3df\") " pod="openstack/cinder-backup-0" Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.580786 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/75b2a689-5a22-4496-af32-4e93e0b2f3df-run\") pod \"cinder-backup-0\" (UID: \"75b2a689-5a22-4496-af32-4e93e0b2f3df\") " pod="openstack/cinder-backup-0" Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.585456 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/75b2a689-5a22-4496-af32-4e93e0b2f3df-scripts\") pod \"cinder-backup-0\" (UID: \"75b2a689-5a22-4496-af32-4e93e0b2f3df\") " pod="openstack/cinder-backup-0" Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.586271 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75b2a689-5a22-4496-af32-4e93e0b2f3df-config-data\") pod \"cinder-backup-0\" (UID: \"75b2a689-5a22-4496-af32-4e93e0b2f3df\") " pod="openstack/cinder-backup-0" Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.588738 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/75b2a689-5a22-4496-af32-4e93e0b2f3df-config-data-custom\") pod \"cinder-backup-0\" (UID: \"75b2a689-5a22-4496-af32-4e93e0b2f3df\") " pod="openstack/cinder-backup-0" Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.589340 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75b2a689-5a22-4496-af32-4e93e0b2f3df-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"75b2a689-5a22-4496-af32-4e93e0b2f3df\") " pod="openstack/cinder-backup-0" Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.589643 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/75b2a689-5a22-4496-af32-4e93e0b2f3df-ceph\") pod \"cinder-backup-0\" (UID: \"75b2a689-5a22-4496-af32-4e93e0b2f3df\") " pod="openstack/cinder-backup-0" Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.602005 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7zgt8\" (UniqueName: \"kubernetes.io/projected/75b2a689-5a22-4496-af32-4e93e0b2f3df-kube-api-access-7zgt8\") pod \"cinder-backup-0\" (UID: \"75b2a689-5a22-4496-af32-4e93e0b2f3df\") " pod="openstack/cinder-backup-0" Dec 05 11:30:41 crc kubenswrapper[4728]: I1205 11:30:41.712728 4728 util.go:30] "No sandbox for pod can 
be found. Need to start a new one" pod="openstack/cinder-backup-0" Dec 05 11:30:42 crc kubenswrapper[4728]: I1205 11:30:42.339001 4728 generic.go:334] "Generic (PLEG): container finished" podID="44b0cb2c-149f-4a9d-a494-cb9542391b4f" containerID="794b1f4b7e349796e5084b3141fc0d98aba909f56faa9534d2a57dfe8e876706" exitCode=0 Dec 05 11:30:42 crc kubenswrapper[4728]: I1205 11:30:42.339492 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"44b0cb2c-149f-4a9d-a494-cb9542391b4f","Type":"ContainerDied","Data":"794b1f4b7e349796e5084b3141fc0d98aba909f56faa9534d2a57dfe8e876706"} Dec 05 11:30:42 crc kubenswrapper[4728]: I1205 11:30:42.345413 4728 generic.go:334] "Generic (PLEG): container finished" podID="f7faef5d-1c9d-4820-a97e-22486f06f9ce" containerID="652f04c78f6b6b46fd9dc695423eec317fb5cf018fd85fca0d78016a62b747f5" exitCode=0 Dec 05 11:30:42 crc kubenswrapper[4728]: I1205 11:30:42.345463 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6d6466fdb6-7fhbh" event={"ID":"f7faef5d-1c9d-4820-a97e-22486f06f9ce","Type":"ContainerDied","Data":"652f04c78f6b6b46fd9dc695423eec317fb5cf018fd85fca0d78016a62b747f5"} Dec 05 11:30:42 crc kubenswrapper[4728]: I1205 11:30:42.345494 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6d6466fdb6-7fhbh" event={"ID":"f7faef5d-1c9d-4820-a97e-22486f06f9ce","Type":"ContainerDied","Data":"683b15eb91e6f9c6665a00ff0da11e1556269d4472e164c17192b2d73e2e256d"} Dec 05 11:30:42 crc kubenswrapper[4728]: I1205 11:30:42.345507 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="683b15eb91e6f9c6665a00ff0da11e1556269d4472e164c17192b2d73e2e256d" Dec 05 11:30:42 crc kubenswrapper[4728]: I1205 11:30:42.374894 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-6d6466fdb6-7fhbh" Dec 05 11:30:42 crc kubenswrapper[4728]: I1205 11:30:42.379997 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="36b86337-512e-47a3-80ec-ea9c3b25c3f6" path="/var/lib/kubelet/pods/36b86337-512e-47a3-80ec-ea9c3b25c3f6/volumes" Dec 05 11:30:42 crc kubenswrapper[4728]: W1205 11:30:42.506938 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod75b2a689_5a22_4496_af32_4e93e0b2f3df.slice/crio-badd75462bfc8422e0f4676c8e76bfce079037eb8c1670d2a12caf7678fd1937 WatchSource:0}: Error finding container badd75462bfc8422e0f4676c8e76bfce079037eb8c1670d2a12caf7678fd1937: Status 404 returned error can't find the container with id badd75462bfc8422e0f4676c8e76bfce079037eb8c1670d2a12caf7678fd1937 Dec 05 11:30:42 crc kubenswrapper[4728]: I1205 11:30:42.517408 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wrbdm\" (UniqueName: \"kubernetes.io/projected/f7faef5d-1c9d-4820-a97e-22486f06f9ce-kube-api-access-wrbdm\") pod \"f7faef5d-1c9d-4820-a97e-22486f06f9ce\" (UID: \"f7faef5d-1c9d-4820-a97e-22486f06f9ce\") " Dec 05 11:30:42 crc kubenswrapper[4728]: I1205 11:30:42.517625 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f7faef5d-1c9d-4820-a97e-22486f06f9ce-logs\") pod \"f7faef5d-1c9d-4820-a97e-22486f06f9ce\" (UID: \"f7faef5d-1c9d-4820-a97e-22486f06f9ce\") " Dec 05 11:30:42 crc kubenswrapper[4728]: I1205 11:30:42.517809 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7faef5d-1c9d-4820-a97e-22486f06f9ce-config-data\") pod \"f7faef5d-1c9d-4820-a97e-22486f06f9ce\" (UID: \"f7faef5d-1c9d-4820-a97e-22486f06f9ce\") " Dec 05 11:30:42 crc kubenswrapper[4728]: I1205 11:30:42.517889 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7faef5d-1c9d-4820-a97e-22486f06f9ce-combined-ca-bundle\") pod \"f7faef5d-1c9d-4820-a97e-22486f06f9ce\" (UID: \"f7faef5d-1c9d-4820-a97e-22486f06f9ce\") " Dec 05 11:30:42 crc kubenswrapper[4728]: I1205 11:30:42.517913 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f7faef5d-1c9d-4820-a97e-22486f06f9ce-config-data-custom\") pod \"f7faef5d-1c9d-4820-a97e-22486f06f9ce\" (UID: \"f7faef5d-1c9d-4820-a97e-22486f06f9ce\") " Dec 05 11:30:42 crc kubenswrapper[4728]: I1205 11:30:42.518501 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f7faef5d-1c9d-4820-a97e-22486f06f9ce-logs" (OuterVolumeSpecName: "logs") pod "f7faef5d-1c9d-4820-a97e-22486f06f9ce" (UID: "f7faef5d-1c9d-4820-a97e-22486f06f9ce"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:30:42 crc kubenswrapper[4728]: I1205 11:30:42.529143 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f7faef5d-1c9d-4820-a97e-22486f06f9ce-kube-api-access-wrbdm" (OuterVolumeSpecName: "kube-api-access-wrbdm") pod "f7faef5d-1c9d-4820-a97e-22486f06f9ce" (UID: "f7faef5d-1c9d-4820-a97e-22486f06f9ce"). InnerVolumeSpecName "kube-api-access-wrbdm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:30:42 crc kubenswrapper[4728]: I1205 11:30:42.540574 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Dec 05 11:30:42 crc kubenswrapper[4728]: I1205 11:30:42.541960 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7faef5d-1c9d-4820-a97e-22486f06f9ce-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "f7faef5d-1c9d-4820-a97e-22486f06f9ce" (UID: "f7faef5d-1c9d-4820-a97e-22486f06f9ce"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:30:42 crc kubenswrapper[4728]: I1205 11:30:42.620048 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wrbdm\" (UniqueName: \"kubernetes.io/projected/f7faef5d-1c9d-4820-a97e-22486f06f9ce-kube-api-access-wrbdm\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:42 crc kubenswrapper[4728]: I1205 11:30:42.620080 4728 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f7faef5d-1c9d-4820-a97e-22486f06f9ce-logs\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:42 crc kubenswrapper[4728]: I1205 11:30:42.620090 4728 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f7faef5d-1c9d-4820-a97e-22486f06f9ce-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:42 crc kubenswrapper[4728]: I1205 11:30:42.629997 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7faef5d-1c9d-4820-a97e-22486f06f9ce-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f7faef5d-1c9d-4820-a97e-22486f06f9ce" (UID: "f7faef5d-1c9d-4820-a97e-22486f06f9ce"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:30:42 crc kubenswrapper[4728]: I1205 11:30:42.698185 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7faef5d-1c9d-4820-a97e-22486f06f9ce-config-data" (OuterVolumeSpecName: "config-data") pod "f7faef5d-1c9d-4820-a97e-22486f06f9ce" (UID: "f7faef5d-1c9d-4820-a97e-22486f06f9ce"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:30:42 crc kubenswrapper[4728]: I1205 11:30:42.721985 4728 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7faef5d-1c9d-4820-a97e-22486f06f9ce-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:42 crc kubenswrapper[4728]: I1205 11:30:42.722274 4728 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7faef5d-1c9d-4820-a97e-22486f06f9ce-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.270078 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Dec 05 11:30:43 crc kubenswrapper[4728]: E1205 11:30:43.270964 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7faef5d-1c9d-4820-a97e-22486f06f9ce" containerName="barbican-api" Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.270979 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7faef5d-1c9d-4820-a97e-22486f06f9ce" containerName="barbican-api" Dec 05 11:30:43 crc kubenswrapper[4728]: E1205 11:30:43.270997 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7faef5d-1c9d-4820-a97e-22486f06f9ce" containerName="barbican-api-log" Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.271003 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7faef5d-1c9d-4820-a97e-22486f06f9ce" containerName="barbican-api-log" Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.271187 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7faef5d-1c9d-4820-a97e-22486f06f9ce" containerName="barbican-api-log" Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.271222 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7faef5d-1c9d-4820-a97e-22486f06f9ce" containerName="barbican-api" Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.271914 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.276049 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.276232 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.276412 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-bmmf6" Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.302009 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.333681 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-76xcs\" (UniqueName: \"kubernetes.io/projected/e1c36796-b303-4cf6-908d-d41c2a16f5b6-kube-api-access-76xcs\") pod \"openstackclient\" (UID: \"e1c36796-b303-4cf6-908d-d41c2a16f5b6\") " pod="openstack/openstackclient" Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.333736 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1c36796-b303-4cf6-908d-d41c2a16f5b6-combined-ca-bundle\") pod \"openstackclient\" (UID: \"e1c36796-b303-4cf6-908d-d41c2a16f5b6\") " pod="openstack/openstackclient" Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.333821 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e1c36796-b303-4cf6-908d-d41c2a16f5b6-openstack-config-secret\") pod \"openstackclient\" (UID: \"e1c36796-b303-4cf6-908d-d41c2a16f5b6\") " pod="openstack/openstackclient" Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.333846 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/e1c36796-b303-4cf6-908d-d41c2a16f5b6-openstack-config\") pod \"openstackclient\" (UID: \"e1c36796-b303-4cf6-908d-d41c2a16f5b6\") " pod="openstack/openstackclient" Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.390960 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"75b2a689-5a22-4496-af32-4e93e0b2f3df","Type":"ContainerStarted","Data":"30097a07363903ec8de709f6246a0b9252f6a83a6b9b6c62f833dcae8eca45de"} Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.391208 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"75b2a689-5a22-4496-af32-4e93e0b2f3df","Type":"ContainerStarted","Data":"4fbdb639d803dacc8967dad682e8383dc5cc445f585be5c41cec32347320ee5b"} Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.391219 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"75b2a689-5a22-4496-af32-4e93e0b2f3df","Type":"ContainerStarted","Data":"badd75462bfc8422e0f4676c8e76bfce079037eb8c1670d2a12caf7678fd1937"} Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.417009 4728 generic.go:334] "Generic (PLEG): container finished" podID="44b0cb2c-149f-4a9d-a494-cb9542391b4f" containerID="2e32be107c6c7a3cc13b2c62a76dd6b7745d7de8931f65993b5363cf07b691ee" exitCode=0 Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.417103 4728 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"44b0cb2c-149f-4a9d-a494-cb9542391b4f","Type":"ContainerDied","Data":"2e32be107c6c7a3cc13b2c62a76dd6b7745d7de8931f65993b5363cf07b691ee"} Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.426167 4728 generic.go:334] "Generic (PLEG): container finished" podID="3f719999-317e-4c1f-9d0d-53c9d1277e14" containerID="425007df221e112a565a0188f7d6bd3e60cdbec2e9a8b8d7180f5eb19aa8a152" exitCode=0 Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.426200 4728 generic.go:334] "Generic (PLEG): container finished" podID="3f719999-317e-4c1f-9d0d-53c9d1277e14" containerID="a8bf728812863067c249dc15bb0b97b2388d44dc7ff636fd27cfdc36b914e72c" exitCode=0 Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.426265 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6d6466fdb6-7fhbh" Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.427082 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"3f719999-317e-4c1f-9d0d-53c9d1277e14","Type":"ContainerDied","Data":"425007df221e112a565a0188f7d6bd3e60cdbec2e9a8b8d7180f5eb19aa8a152"} Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.427129 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"3f719999-317e-4c1f-9d0d-53c9d1277e14","Type":"ContainerDied","Data":"a8bf728812863067c249dc15bb0b97b2388d44dc7ff636fd27cfdc36b914e72c"} Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.435915 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e1c36796-b303-4cf6-908d-d41c2a16f5b6-openstack-config-secret\") pod \"openstackclient\" (UID: \"e1c36796-b303-4cf6-908d-d41c2a16f5b6\") " pod="openstack/openstackclient" Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.435971 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/e1c36796-b303-4cf6-908d-d41c2a16f5b6-openstack-config\") pod \"openstackclient\" (UID: \"e1c36796-b303-4cf6-908d-d41c2a16f5b6\") " pod="openstack/openstackclient" Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.436101 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-76xcs\" (UniqueName: \"kubernetes.io/projected/e1c36796-b303-4cf6-908d-d41c2a16f5b6-kube-api-access-76xcs\") pod \"openstackclient\" (UID: \"e1c36796-b303-4cf6-908d-d41c2a16f5b6\") " pod="openstack/openstackclient" Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.436144 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1c36796-b303-4cf6-908d-d41c2a16f5b6-combined-ca-bundle\") pod \"openstackclient\" (UID: \"e1c36796-b303-4cf6-908d-d41c2a16f5b6\") " pod="openstack/openstackclient" Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.437825 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/e1c36796-b303-4cf6-908d-d41c2a16f5b6-openstack-config\") pod \"openstackclient\" (UID: \"e1c36796-b303-4cf6-908d-d41c2a16f5b6\") " pod="openstack/openstackclient" Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.446012 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: 
\"kubernetes.io/secret/e1c36796-b303-4cf6-908d-d41c2a16f5b6-openstack-config-secret\") pod \"openstackclient\" (UID: \"e1c36796-b303-4cf6-908d-d41c2a16f5b6\") " pod="openstack/openstackclient" Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.451360 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1c36796-b303-4cf6-908d-d41c2a16f5b6-combined-ca-bundle\") pod \"openstackclient\" (UID: \"e1c36796-b303-4cf6-908d-d41c2a16f5b6\") " pod="openstack/openstackclient" Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.453187 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-backup-0" podStartSLOduration=2.453169247 podStartE2EDuration="2.453169247s" podCreationTimestamp="2025-12-05 11:30:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:30:43.450829553 +0000 UTC m=+1377.592952256" watchObservedRunningTime="2025-12-05 11:30:43.453169247 +0000 UTC m=+1377.595291940" Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.470978 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-76xcs\" (UniqueName: \"kubernetes.io/projected/e1c36796-b303-4cf6-908d-d41c2a16f5b6-kube-api-access-76xcs\") pod \"openstackclient\" (UID: \"e1c36796-b303-4cf6-908d-d41c2a16f5b6\") " pod="openstack/openstackclient" Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.616838 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-6d6466fdb6-7fhbh"] Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.646165 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-6d6466fdb6-7fhbh"] Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.647350 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.687637 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.705581 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.726554 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.736496 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.744670 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.832946 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.869976 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6860f6fe-8127-4cbd-af2d-7e5e0e4ed001-combined-ca-bundle\") pod \"openstackclient\" (UID: \"6860f6fe-8127-4cbd-af2d-7e5e0e4ed001\") " pod="openstack/openstackclient" Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.870041 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/6860f6fe-8127-4cbd-af2d-7e5e0e4ed001-openstack-config\") pod \"openstackclient\" (UID: \"6860f6fe-8127-4cbd-af2d-7e5e0e4ed001\") " pod="openstack/openstackclient" Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.870111 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/6860f6fe-8127-4cbd-af2d-7e5e0e4ed001-openstack-config-secret\") pod \"openstackclient\" (UID: \"6860f6fe-8127-4cbd-af2d-7e5e0e4ed001\") " pod="openstack/openstackclient" Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.870141 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2jqxl\" (UniqueName: \"kubernetes.io/projected/6860f6fe-8127-4cbd-af2d-7e5e0e4ed001-kube-api-access-2jqxl\") pod \"openstackclient\" (UID: \"6860f6fe-8127-4cbd-af2d-7e5e0e4ed001\") " pod="openstack/openstackclient" Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.949724 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:43 crc kubenswrapper[4728]: E1205 11:30:43.961064 4728 log.go:32] "RunPodSandbox from runtime service failed" err=< Dec 05 11:30:43 crc kubenswrapper[4728]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_openstackclient_openstack_e1c36796-b303-4cf6-908d-d41c2a16f5b6_0(ddfdaac7d93197a538a15bdce8b0a0bfa305938c1e870b7c1cf0c42332ae6bec): error adding pod openstack_openstackclient to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"ddfdaac7d93197a538a15bdce8b0a0bfa305938c1e870b7c1cf0c42332ae6bec" Netns:"/var/run/netns/de3ab79e-286a-46fa-8049-1ced34620a61" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openstack;K8S_POD_NAME=openstackclient;K8S_POD_INFRA_CONTAINER_ID=ddfdaac7d93197a538a15bdce8b0a0bfa305938c1e870b7c1cf0c42332ae6bec;K8S_POD_UID=e1c36796-b303-4cf6-908d-d41c2a16f5b6" Path:"" ERRORED: error configuring pod [openstack/openstackclient] networking: Multus: [openstack/openstackclient/e1c36796-b303-4cf6-908d-d41c2a16f5b6]: expected pod UID "e1c36796-b303-4cf6-908d-d41c2a16f5b6" but got "6860f6fe-8127-4cbd-af2d-7e5e0e4ed001" from Kube API Dec 05 11:30:43 crc kubenswrapper[4728]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 05 11:30:43 crc kubenswrapper[4728]: > Dec 05 11:30:43 crc kubenswrapper[4728]: E1205 
11:30:43.961144 4728 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err=< Dec 05 11:30:43 crc kubenswrapper[4728]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_openstackclient_openstack_e1c36796-b303-4cf6-908d-d41c2a16f5b6_0(ddfdaac7d93197a538a15bdce8b0a0bfa305938c1e870b7c1cf0c42332ae6bec): error adding pod openstack_openstackclient to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"ddfdaac7d93197a538a15bdce8b0a0bfa305938c1e870b7c1cf0c42332ae6bec" Netns:"/var/run/netns/de3ab79e-286a-46fa-8049-1ced34620a61" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openstack;K8S_POD_NAME=openstackclient;K8S_POD_INFRA_CONTAINER_ID=ddfdaac7d93197a538a15bdce8b0a0bfa305938c1e870b7c1cf0c42332ae6bec;K8S_POD_UID=e1c36796-b303-4cf6-908d-d41c2a16f5b6" Path:"" ERRORED: error configuring pod [openstack/openstackclient] networking: Multus: [openstack/openstackclient/e1c36796-b303-4cf6-908d-d41c2a16f5b6]: expected pod UID "e1c36796-b303-4cf6-908d-d41c2a16f5b6" but got "6860f6fe-8127-4cbd-af2d-7e5e0e4ed001" from Kube API Dec 05 11:30:43 crc kubenswrapper[4728]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 05 11:30:43 crc kubenswrapper[4728]: > pod="openstack/openstackclient" Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.973294 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mhvgr\" (UniqueName: \"kubernetes.io/projected/44b0cb2c-149f-4a9d-a494-cb9542391b4f-kube-api-access-mhvgr\") pod \"44b0cb2c-149f-4a9d-a494-cb9542391b4f\" (UID: \"44b0cb2c-149f-4a9d-a494-cb9542391b4f\") " Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.973445 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/44b0cb2c-149f-4a9d-a494-cb9542391b4f-etc-machine-id\") pod \"44b0cb2c-149f-4a9d-a494-cb9542391b4f\" (UID: \"44b0cb2c-149f-4a9d-a494-cb9542391b4f\") " Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.973469 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/44b0cb2c-149f-4a9d-a494-cb9542391b4f-scripts\") pod \"44b0cb2c-149f-4a9d-a494-cb9542391b4f\" (UID: \"44b0cb2c-149f-4a9d-a494-cb9542391b4f\") " Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.973634 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44b0cb2c-149f-4a9d-a494-cb9542391b4f-combined-ca-bundle\") pod \"44b0cb2c-149f-4a9d-a494-cb9542391b4f\" (UID: \"44b0cb2c-149f-4a9d-a494-cb9542391b4f\") " Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.973719 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44b0cb2c-149f-4a9d-a494-cb9542391b4f-config-data\") pod \"44b0cb2c-149f-4a9d-a494-cb9542391b4f\" (UID: \"44b0cb2c-149f-4a9d-a494-cb9542391b4f\") " Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.973859 4728 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/44b0cb2c-149f-4a9d-a494-cb9542391b4f-config-data-custom\") pod \"44b0cb2c-149f-4a9d-a494-cb9542391b4f\" (UID: \"44b0cb2c-149f-4a9d-a494-cb9542391b4f\") " Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.974182 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/44b0cb2c-149f-4a9d-a494-cb9542391b4f-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "44b0cb2c-149f-4a9d-a494-cb9542391b4f" (UID: "44b0cb2c-149f-4a9d-a494-cb9542391b4f"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.974357 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6860f6fe-8127-4cbd-af2d-7e5e0e4ed001-combined-ca-bundle\") pod \"openstackclient\" (UID: \"6860f6fe-8127-4cbd-af2d-7e5e0e4ed001\") " pod="openstack/openstackclient" Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.974545 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/6860f6fe-8127-4cbd-af2d-7e5e0e4ed001-openstack-config\") pod \"openstackclient\" (UID: \"6860f6fe-8127-4cbd-af2d-7e5e0e4ed001\") " pod="openstack/openstackclient" Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.974828 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/6860f6fe-8127-4cbd-af2d-7e5e0e4ed001-openstack-config-secret\") pod \"openstackclient\" (UID: \"6860f6fe-8127-4cbd-af2d-7e5e0e4ed001\") " pod="openstack/openstackclient" Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.974905 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2jqxl\" (UniqueName: \"kubernetes.io/projected/6860f6fe-8127-4cbd-af2d-7e5e0e4ed001-kube-api-access-2jqxl\") pod \"openstackclient\" (UID: \"6860f6fe-8127-4cbd-af2d-7e5e0e4ed001\") " pod="openstack/openstackclient" Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.977754 4728 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/44b0cb2c-149f-4a9d-a494-cb9542391b4f-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.978087 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/6860f6fe-8127-4cbd-af2d-7e5e0e4ed001-openstack-config\") pod \"openstackclient\" (UID: \"6860f6fe-8127-4cbd-af2d-7e5e0e4ed001\") " pod="openstack/openstackclient" Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.982939 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44b0cb2c-149f-4a9d-a494-cb9542391b4f-scripts" (OuterVolumeSpecName: "scripts") pod "44b0cb2c-149f-4a9d-a494-cb9542391b4f" (UID: "44b0cb2c-149f-4a9d-a494-cb9542391b4f"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.986544 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44b0cb2c-149f-4a9d-a494-cb9542391b4f-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "44b0cb2c-149f-4a9d-a494-cb9542391b4f" (UID: "44b0cb2c-149f-4a9d-a494-cb9542391b4f"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.986996 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44b0cb2c-149f-4a9d-a494-cb9542391b4f-kube-api-access-mhvgr" (OuterVolumeSpecName: "kube-api-access-mhvgr") pod "44b0cb2c-149f-4a9d-a494-cb9542391b4f" (UID: "44b0cb2c-149f-4a9d-a494-cb9542391b4f"). InnerVolumeSpecName "kube-api-access-mhvgr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:30:43 crc kubenswrapper[4728]: I1205 11:30:43.988384 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/6860f6fe-8127-4cbd-af2d-7e5e0e4ed001-openstack-config-secret\") pod \"openstackclient\" (UID: \"6860f6fe-8127-4cbd-af2d-7e5e0e4ed001\") " pod="openstack/openstackclient" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.000881 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6860f6fe-8127-4cbd-af2d-7e5e0e4ed001-combined-ca-bundle\") pod \"openstackclient\" (UID: \"6860f6fe-8127-4cbd-af2d-7e5e0e4ed001\") " pod="openstack/openstackclient" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.012819 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2jqxl\" (UniqueName: \"kubernetes.io/projected/6860f6fe-8127-4cbd-af2d-7e5e0e4ed001-kube-api-access-2jqxl\") pod \"openstackclient\" (UID: \"6860f6fe-8127-4cbd-af2d-7e5e0e4ed001\") " pod="openstack/openstackclient" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.076091 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44b0cb2c-149f-4a9d-a494-cb9542391b4f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "44b0cb2c-149f-4a9d-a494-cb9542391b4f" (UID: "44b0cb2c-149f-4a9d-a494-cb9542391b4f"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.079969 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-etc-iscsi\") pod \"3f719999-317e-4c1f-9d0d-53c9d1277e14\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.080047 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jqbgd\" (UniqueName: \"kubernetes.io/projected/3f719999-317e-4c1f-9d0d-53c9d1277e14-kube-api-access-jqbgd\") pod \"3f719999-317e-4c1f-9d0d-53c9d1277e14\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.080097 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-var-lib-cinder\") pod \"3f719999-317e-4c1f-9d0d-53c9d1277e14\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.080112 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-var-locks-cinder\") pod \"3f719999-317e-4c1f-9d0d-53c9d1277e14\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.080141 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-etc-nvme\") pod \"3f719999-317e-4c1f-9d0d-53c9d1277e14\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.080179 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/3f719999-317e-4c1f-9d0d-53c9d1277e14-ceph\") pod \"3f719999-317e-4c1f-9d0d-53c9d1277e14\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.080197 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-dev\") pod \"3f719999-317e-4c1f-9d0d-53c9d1277e14\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.080240 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f719999-317e-4c1f-9d0d-53c9d1277e14-combined-ca-bundle\") pod \"3f719999-317e-4c1f-9d0d-53c9d1277e14\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.080252 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-sys\") pod \"3f719999-317e-4c1f-9d0d-53c9d1277e14\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.080288 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-run\") pod \"3f719999-317e-4c1f-9d0d-53c9d1277e14\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 
11:30:44.080315 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f719999-317e-4c1f-9d0d-53c9d1277e14-config-data\") pod \"3f719999-317e-4c1f-9d0d-53c9d1277e14\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.080380 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-etc-machine-id\") pod \"3f719999-317e-4c1f-9d0d-53c9d1277e14\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.080398 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-var-locks-brick\") pod \"3f719999-317e-4c1f-9d0d-53c9d1277e14\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.080441 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-lib-modules\") pod \"3f719999-317e-4c1f-9d0d-53c9d1277e14\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.080457 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3f719999-317e-4c1f-9d0d-53c9d1277e14-config-data-custom\") pod \"3f719999-317e-4c1f-9d0d-53c9d1277e14\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.080473 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f719999-317e-4c1f-9d0d-53c9d1277e14-scripts\") pod \"3f719999-317e-4c1f-9d0d-53c9d1277e14\" (UID: \"3f719999-317e-4c1f-9d0d-53c9d1277e14\") " Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.080901 4728 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44b0cb2c-149f-4a9d-a494-cb9542391b4f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.080918 4728 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/44b0cb2c-149f-4a9d-a494-cb9542391b4f-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.080927 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mhvgr\" (UniqueName: \"kubernetes.io/projected/44b0cb2c-149f-4a9d-a494-cb9542391b4f-kube-api-access-mhvgr\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.080937 4728 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/44b0cb2c-149f-4a9d-a494-cb9542391b4f-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.081204 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-etc-iscsi" (OuterVolumeSpecName: "etc-iscsi") pod "3f719999-317e-4c1f-9d0d-53c9d1277e14" (UID: "3f719999-317e-4c1f-9d0d-53c9d1277e14"). InnerVolumeSpecName "etc-iscsi". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.081492 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-run" (OuterVolumeSpecName: "run") pod "3f719999-317e-4c1f-9d0d-53c9d1277e14" (UID: "3f719999-317e-4c1f-9d0d-53c9d1277e14"). InnerVolumeSpecName "run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.081519 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-lib-modules" (OuterVolumeSpecName: "lib-modules") pod "3f719999-317e-4c1f-9d0d-53c9d1277e14" (UID: "3f719999-317e-4c1f-9d0d-53c9d1277e14"). InnerVolumeSpecName "lib-modules". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.081546 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-var-locks-brick" (OuterVolumeSpecName: "var-locks-brick") pod "3f719999-317e-4c1f-9d0d-53c9d1277e14" (UID: "3f719999-317e-4c1f-9d0d-53c9d1277e14"). InnerVolumeSpecName "var-locks-brick". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.081553 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-var-lib-cinder" (OuterVolumeSpecName: "var-lib-cinder") pod "3f719999-317e-4c1f-9d0d-53c9d1277e14" (UID: "3f719999-317e-4c1f-9d0d-53c9d1277e14"). InnerVolumeSpecName "var-lib-cinder". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.081576 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-dev" (OuterVolumeSpecName: "dev") pod "3f719999-317e-4c1f-9d0d-53c9d1277e14" (UID: "3f719999-317e-4c1f-9d0d-53c9d1277e14"). InnerVolumeSpecName "dev". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.081602 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "3f719999-317e-4c1f-9d0d-53c9d1277e14" (UID: "3f719999-317e-4c1f-9d0d-53c9d1277e14"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.083359 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-sys" (OuterVolumeSpecName: "sys") pod "3f719999-317e-4c1f-9d0d-53c9d1277e14" (UID: "3f719999-317e-4c1f-9d0d-53c9d1277e14"). InnerVolumeSpecName "sys". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.083387 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-etc-nvme" (OuterVolumeSpecName: "etc-nvme") pod "3f719999-317e-4c1f-9d0d-53c9d1277e14" (UID: "3f719999-317e-4c1f-9d0d-53c9d1277e14"). InnerVolumeSpecName "etc-nvme". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.083387 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-var-locks-cinder" (OuterVolumeSpecName: "var-locks-cinder") pod "3f719999-317e-4c1f-9d0d-53c9d1277e14" (UID: "3f719999-317e-4c1f-9d0d-53c9d1277e14"). InnerVolumeSpecName "var-locks-cinder". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.084717 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3f719999-317e-4c1f-9d0d-53c9d1277e14-kube-api-access-jqbgd" (OuterVolumeSpecName: "kube-api-access-jqbgd") pod "3f719999-317e-4c1f-9d0d-53c9d1277e14" (UID: "3f719999-317e-4c1f-9d0d-53c9d1277e14"). InnerVolumeSpecName "kube-api-access-jqbgd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.084885 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3f719999-317e-4c1f-9d0d-53c9d1277e14-ceph" (OuterVolumeSpecName: "ceph") pod "3f719999-317e-4c1f-9d0d-53c9d1277e14" (UID: "3f719999-317e-4c1f-9d0d-53c9d1277e14"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.084955 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f719999-317e-4c1f-9d0d-53c9d1277e14-scripts" (OuterVolumeSpecName: "scripts") pod "3f719999-317e-4c1f-9d0d-53c9d1277e14" (UID: "3f719999-317e-4c1f-9d0d-53c9d1277e14"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.096025 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f719999-317e-4c1f-9d0d-53c9d1277e14-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "3f719999-317e-4c1f-9d0d-53c9d1277e14" (UID: "3f719999-317e-4c1f-9d0d-53c9d1277e14"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.121538 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44b0cb2c-149f-4a9d-a494-cb9542391b4f-config-data" (OuterVolumeSpecName: "config-data") pod "44b0cb2c-149f-4a9d-a494-cb9542391b4f" (UID: "44b0cb2c-149f-4a9d-a494-cb9542391b4f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.159480 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f719999-317e-4c1f-9d0d-53c9d1277e14-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3f719999-317e-4c1f-9d0d-53c9d1277e14" (UID: "3f719999-317e-4c1f-9d0d-53c9d1277e14"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.182528 4728 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.182559 4728 reconciler_common.go:293] "Volume detached for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-var-locks-brick\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.182569 4728 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44b0cb2c-149f-4a9d-a494-cb9542391b4f-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.182597 4728 reconciler_common.go:293] "Volume detached for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-lib-modules\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.182605 4728 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3f719999-317e-4c1f-9d0d-53c9d1277e14-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.182614 4728 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f719999-317e-4c1f-9d0d-53c9d1277e14-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.182622 4728 reconciler_common.go:293] "Volume detached for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-etc-iscsi\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.182631 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jqbgd\" (UniqueName: \"kubernetes.io/projected/3f719999-317e-4c1f-9d0d-53c9d1277e14-kube-api-access-jqbgd\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.182639 4728 reconciler_common.go:293] "Volume detached for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-var-lib-cinder\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.182647 4728 reconciler_common.go:293] "Volume detached for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-var-locks-cinder\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.182768 4728 reconciler_common.go:293] "Volume detached for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-etc-nvme\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.182780 4728 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/3f719999-317e-4c1f-9d0d-53c9d1277e14-ceph\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.182819 4728 reconciler_common.go:293] "Volume detached for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-dev\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.182829 4728 
reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f719999-317e-4c1f-9d0d-53c9d1277e14-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.182837 4728 reconciler_common.go:293] "Volume detached for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-sys\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.182845 4728 reconciler_common.go:293] "Volume detached for volume \"run\" (UniqueName: \"kubernetes.io/host-path/3f719999-317e-4c1f-9d0d-53c9d1277e14-run\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.197970 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f719999-317e-4c1f-9d0d-53c9d1277e14-config-data" (OuterVolumeSpecName: "config-data") pod "3f719999-317e-4c1f-9d0d-53c9d1277e14" (UID: "3f719999-317e-4c1f-9d0d-53c9d1277e14"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.242553 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.284896 4728 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f719999-317e-4c1f-9d0d-53c9d1277e14-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.366315 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f7faef5d-1c9d-4820-a97e-22486f06f9ce" path="/var/lib/kubelet/pods/f7faef5d-1c9d-4820-a97e-22486f06f9ce/volumes" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.473922 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"44b0cb2c-149f-4a9d-a494-cb9542391b4f","Type":"ContainerDied","Data":"5b78811fad174433d7565eb792615c95d0daa3bce86bb98e959b2fdcee9a64e2"} Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.473976 4728 scope.go:117] "RemoveContainer" containerID="794b1f4b7e349796e5084b3141fc0d98aba909f56faa9534d2a57dfe8e876706" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.474151 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.486754 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"3f719999-317e-4c1f-9d0d-53c9d1277e14","Type":"ContainerDied","Data":"f524fdce08c1f6cad215a756b52bf41b478474abefde28ab4043e41ff28ae4d0"} Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.486845 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.486903 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.519922 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.522534 4728 scope.go:117] "RemoveContainer" containerID="2e32be107c6c7a3cc13b2c62a76dd6b7745d7de8931f65993b5363cf07b691ee" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.566281 4728 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="e1c36796-b303-4cf6-908d-d41c2a16f5b6" podUID="6860f6fe-8127-4cbd-af2d-7e5e0e4ed001" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.579343 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.590510 4728 scope.go:117] "RemoveContainer" containerID="425007df221e112a565a0188f7d6bd3e60cdbec2e9a8b8d7180f5eb19aa8a152" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.591759 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e1c36796-b303-4cf6-908d-d41c2a16f5b6-openstack-config-secret\") pod \"e1c36796-b303-4cf6-908d-d41c2a16f5b6\" (UID: \"e1c36796-b303-4cf6-908d-d41c2a16f5b6\") " Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.591852 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-76xcs\" (UniqueName: \"kubernetes.io/projected/e1c36796-b303-4cf6-908d-d41c2a16f5b6-kube-api-access-76xcs\") pod \"e1c36796-b303-4cf6-908d-d41c2a16f5b6\" (UID: \"e1c36796-b303-4cf6-908d-d41c2a16f5b6\") " Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.592117 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1c36796-b303-4cf6-908d-d41c2a16f5b6-combined-ca-bundle\") pod \"e1c36796-b303-4cf6-908d-d41c2a16f5b6\" (UID: \"e1c36796-b303-4cf6-908d-d41c2a16f5b6\") " Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.592317 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/e1c36796-b303-4cf6-908d-d41c2a16f5b6-openstack-config\") pod \"e1c36796-b303-4cf6-908d-d41c2a16f5b6\" (UID: \"e1c36796-b303-4cf6-908d-d41c2a16f5b6\") " Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.597193 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.597906 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e1c36796-b303-4cf6-908d-d41c2a16f5b6-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "e1c36796-b303-4cf6-908d-d41c2a16f5b6" (UID: "e1c36796-b303-4cf6-908d-d41c2a16f5b6"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.601946 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1c36796-b303-4cf6-908d-d41c2a16f5b6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e1c36796-b303-4cf6-908d-d41c2a16f5b6" (UID: "e1c36796-b303-4cf6-908d-d41c2a16f5b6"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.612656 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1c36796-b303-4cf6-908d-d41c2a16f5b6-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "e1c36796-b303-4cf6-908d-d41c2a16f5b6" (UID: "e1c36796-b303-4cf6-908d-d41c2a16f5b6"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.613353 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1c36796-b303-4cf6-908d-d41c2a16f5b6-kube-api-access-76xcs" (OuterVolumeSpecName: "kube-api-access-76xcs") pod "e1c36796-b303-4cf6-908d-d41c2a16f5b6" (UID: "e1c36796-b303-4cf6-908d-d41c2a16f5b6"). InnerVolumeSpecName "kube-api-access-76xcs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.629261 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-volume-volume1-0"] Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.646884 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-volume-volume1-0"] Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.668024 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 11:30:44 crc kubenswrapper[4728]: E1205 11:30:44.668484 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f719999-317e-4c1f-9d0d-53c9d1277e14" containerName="cinder-volume" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.668495 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f719999-317e-4c1f-9d0d-53c9d1277e14" containerName="cinder-volume" Dec 05 11:30:44 crc kubenswrapper[4728]: E1205 11:30:44.668514 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44b0cb2c-149f-4a9d-a494-cb9542391b4f" containerName="probe" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.668520 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="44b0cb2c-149f-4a9d-a494-cb9542391b4f" containerName="probe" Dec 05 11:30:44 crc kubenswrapper[4728]: E1205 11:30:44.668545 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44b0cb2c-149f-4a9d-a494-cb9542391b4f" containerName="cinder-scheduler" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.668551 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="44b0cb2c-149f-4a9d-a494-cb9542391b4f" containerName="cinder-scheduler" Dec 05 11:30:44 crc kubenswrapper[4728]: E1205 11:30:44.668565 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f719999-317e-4c1f-9d0d-53c9d1277e14" containerName="probe" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.668571 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f719999-317e-4c1f-9d0d-53c9d1277e14" containerName="probe" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.668768 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f719999-317e-4c1f-9d0d-53c9d1277e14" containerName="cinder-volume" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.668776 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="44b0cb2c-149f-4a9d-a494-cb9542391b4f" containerName="probe" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.668784 4728 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="44b0cb2c-149f-4a9d-a494-cb9542391b4f" containerName="cinder-scheduler" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.668819 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f719999-317e-4c1f-9d0d-53c9d1277e14" containerName="probe" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.669858 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.682525 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.688934 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.694221 4728 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e1c36796-b303-4cf6-908d-d41c2a16f5b6-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.694242 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-76xcs\" (UniqueName: \"kubernetes.io/projected/e1c36796-b303-4cf6-908d-d41c2a16f5b6-kube-api-access-76xcs\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.694254 4728 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1c36796-b303-4cf6-908d-d41c2a16f5b6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.694262 4728 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/e1c36796-b303-4cf6-908d-d41c2a16f5b6-openstack-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.716153 4728 scope.go:117] "RemoveContainer" containerID="a8bf728812863067c249dc15bb0b97b2388d44dc7ff636fd27cfdc36b914e72c" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.723375 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-volume-volume1-0"] Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.725422 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.734589 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-volume-volume1-config-data" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.748949 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume1-0"] Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.795838 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/23cf88b0-870f-44f4-9f15-aa4b15d86a12-run\") pod \"cinder-volume-volume1-0\" (UID: \"23cf88b0-870f-44f4-9f15-aa4b15d86a12\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.795904 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8j796\" (UniqueName: \"kubernetes.io/projected/61360ddf-d4ef-4328-add9-ac6c2d95d563-kube-api-access-8j796\") pod \"cinder-scheduler-0\" (UID: \"61360ddf-d4ef-4328-add9-ac6c2d95d563\") " pod="openstack/cinder-scheduler-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.795931 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wptdd\" (UniqueName: \"kubernetes.io/projected/23cf88b0-870f-44f4-9f15-aa4b15d86a12-kube-api-access-wptdd\") pod \"cinder-volume-volume1-0\" (UID: \"23cf88b0-870f-44f4-9f15-aa4b15d86a12\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.795949 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/23cf88b0-870f-44f4-9f15-aa4b15d86a12-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"23cf88b0-870f-44f4-9f15-aa4b15d86a12\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.795970 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23cf88b0-870f-44f4-9f15-aa4b15d86a12-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"23cf88b0-870f-44f4-9f15-aa4b15d86a12\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.795985 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23cf88b0-870f-44f4-9f15-aa4b15d86a12-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"23cf88b0-870f-44f4-9f15-aa4b15d86a12\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.796003 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/23cf88b0-870f-44f4-9f15-aa4b15d86a12-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"23cf88b0-870f-44f4-9f15-aa4b15d86a12\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.796025 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/23cf88b0-870f-44f4-9f15-aa4b15d86a12-dev\") pod \"cinder-volume-volume1-0\" (UID: \"23cf88b0-870f-44f4-9f15-aa4b15d86a12\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:44 crc 
kubenswrapper[4728]: I1205 11:30:44.796047 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/23cf88b0-870f-44f4-9f15-aa4b15d86a12-sys\") pod \"cinder-volume-volume1-0\" (UID: \"23cf88b0-870f-44f4-9f15-aa4b15d86a12\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.796065 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/23cf88b0-870f-44f4-9f15-aa4b15d86a12-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"23cf88b0-870f-44f4-9f15-aa4b15d86a12\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.796086 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23cf88b0-870f-44f4-9f15-aa4b15d86a12-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"23cf88b0-870f-44f4-9f15-aa4b15d86a12\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.796106 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/61360ddf-d4ef-4328-add9-ac6c2d95d563-scripts\") pod \"cinder-scheduler-0\" (UID: \"61360ddf-d4ef-4328-add9-ac6c2d95d563\") " pod="openstack/cinder-scheduler-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.796124 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/61360ddf-d4ef-4328-add9-ac6c2d95d563-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"61360ddf-d4ef-4328-add9-ac6c2d95d563\") " pod="openstack/cinder-scheduler-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.796145 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/23cf88b0-870f-44f4-9f15-aa4b15d86a12-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"23cf88b0-870f-44f4-9f15-aa4b15d86a12\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.796243 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/61360ddf-d4ef-4328-add9-ac6c2d95d563-config-data\") pod \"cinder-scheduler-0\" (UID: \"61360ddf-d4ef-4328-add9-ac6c2d95d563\") " pod="openstack/cinder-scheduler-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.796276 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/23cf88b0-870f-44f4-9f15-aa4b15d86a12-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"23cf88b0-870f-44f4-9f15-aa4b15d86a12\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.796299 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/23cf88b0-870f-44f4-9f15-aa4b15d86a12-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"23cf88b0-870f-44f4-9f15-aa4b15d86a12\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.796319 4728 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/61360ddf-d4ef-4328-add9-ac6c2d95d563-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"61360ddf-d4ef-4328-add9-ac6c2d95d563\") " pod="openstack/cinder-scheduler-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.796341 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/23cf88b0-870f-44f4-9f15-aa4b15d86a12-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"23cf88b0-870f-44f4-9f15-aa4b15d86a12\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.796360 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61360ddf-d4ef-4328-add9-ac6c2d95d563-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"61360ddf-d4ef-4328-add9-ac6c2d95d563\") " pod="openstack/cinder-scheduler-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.796389 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/23cf88b0-870f-44f4-9f15-aa4b15d86a12-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"23cf88b0-870f-44f4-9f15-aa4b15d86a12\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.796404 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/23cf88b0-870f-44f4-9f15-aa4b15d86a12-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"23cf88b0-870f-44f4-9f15-aa4b15d86a12\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.869646 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.898120 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/61360ddf-d4ef-4328-add9-ac6c2d95d563-config-data\") pod \"cinder-scheduler-0\" (UID: \"61360ddf-d4ef-4328-add9-ac6c2d95d563\") " pod="openstack/cinder-scheduler-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.898165 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/23cf88b0-870f-44f4-9f15-aa4b15d86a12-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"23cf88b0-870f-44f4-9f15-aa4b15d86a12\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.898195 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/23cf88b0-870f-44f4-9f15-aa4b15d86a12-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"23cf88b0-870f-44f4-9f15-aa4b15d86a12\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.898218 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/61360ddf-d4ef-4328-add9-ac6c2d95d563-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"61360ddf-d4ef-4328-add9-ac6c2d95d563\") " pod="openstack/cinder-scheduler-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 
11:30:44.898241 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/23cf88b0-870f-44f4-9f15-aa4b15d86a12-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"23cf88b0-870f-44f4-9f15-aa4b15d86a12\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.898265 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61360ddf-d4ef-4328-add9-ac6c2d95d563-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"61360ddf-d4ef-4328-add9-ac6c2d95d563\") " pod="openstack/cinder-scheduler-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.898293 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/23cf88b0-870f-44f4-9f15-aa4b15d86a12-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"23cf88b0-870f-44f4-9f15-aa4b15d86a12\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.898309 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/23cf88b0-870f-44f4-9f15-aa4b15d86a12-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"23cf88b0-870f-44f4-9f15-aa4b15d86a12\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.898349 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/23cf88b0-870f-44f4-9f15-aa4b15d86a12-run\") pod \"cinder-volume-volume1-0\" (UID: \"23cf88b0-870f-44f4-9f15-aa4b15d86a12\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.898372 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8j796\" (UniqueName: \"kubernetes.io/projected/61360ddf-d4ef-4328-add9-ac6c2d95d563-kube-api-access-8j796\") pod \"cinder-scheduler-0\" (UID: \"61360ddf-d4ef-4328-add9-ac6c2d95d563\") " pod="openstack/cinder-scheduler-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.898397 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wptdd\" (UniqueName: \"kubernetes.io/projected/23cf88b0-870f-44f4-9f15-aa4b15d86a12-kube-api-access-wptdd\") pod \"cinder-volume-volume1-0\" (UID: \"23cf88b0-870f-44f4-9f15-aa4b15d86a12\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.898414 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/23cf88b0-870f-44f4-9f15-aa4b15d86a12-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"23cf88b0-870f-44f4-9f15-aa4b15d86a12\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.898433 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23cf88b0-870f-44f4-9f15-aa4b15d86a12-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"23cf88b0-870f-44f4-9f15-aa4b15d86a12\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.898449 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/23cf88b0-870f-44f4-9f15-aa4b15d86a12-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"23cf88b0-870f-44f4-9f15-aa4b15d86a12\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.898469 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/23cf88b0-870f-44f4-9f15-aa4b15d86a12-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"23cf88b0-870f-44f4-9f15-aa4b15d86a12\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.898493 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/23cf88b0-870f-44f4-9f15-aa4b15d86a12-dev\") pod \"cinder-volume-volume1-0\" (UID: \"23cf88b0-870f-44f4-9f15-aa4b15d86a12\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.898515 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/23cf88b0-870f-44f4-9f15-aa4b15d86a12-sys\") pod \"cinder-volume-volume1-0\" (UID: \"23cf88b0-870f-44f4-9f15-aa4b15d86a12\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.898532 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/23cf88b0-870f-44f4-9f15-aa4b15d86a12-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"23cf88b0-870f-44f4-9f15-aa4b15d86a12\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.898550 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23cf88b0-870f-44f4-9f15-aa4b15d86a12-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"23cf88b0-870f-44f4-9f15-aa4b15d86a12\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.898570 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/61360ddf-d4ef-4328-add9-ac6c2d95d563-scripts\") pod \"cinder-scheduler-0\" (UID: \"61360ddf-d4ef-4328-add9-ac6c2d95d563\") " pod="openstack/cinder-scheduler-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.898585 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/61360ddf-d4ef-4328-add9-ac6c2d95d563-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"61360ddf-d4ef-4328-add9-ac6c2d95d563\") " pod="openstack/cinder-scheduler-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.899155 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/23cf88b0-870f-44f4-9f15-aa4b15d86a12-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"23cf88b0-870f-44f4-9f15-aa4b15d86a12\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.899675 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/23cf88b0-870f-44f4-9f15-aa4b15d86a12-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"23cf88b0-870f-44f4-9f15-aa4b15d86a12\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:44 crc kubenswrapper[4728]: 
I1205 11:30:44.899745 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/23cf88b0-870f-44f4-9f15-aa4b15d86a12-run\") pod \"cinder-volume-volume1-0\" (UID: \"23cf88b0-870f-44f4-9f15-aa4b15d86a12\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.899815 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/23cf88b0-870f-44f4-9f15-aa4b15d86a12-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"23cf88b0-870f-44f4-9f15-aa4b15d86a12\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.899836 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/23cf88b0-870f-44f4-9f15-aa4b15d86a12-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"23cf88b0-870f-44f4-9f15-aa4b15d86a12\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.900227 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/61360ddf-d4ef-4328-add9-ac6c2d95d563-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"61360ddf-d4ef-4328-add9-ac6c2d95d563\") " pod="openstack/cinder-scheduler-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.900258 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/23cf88b0-870f-44f4-9f15-aa4b15d86a12-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"23cf88b0-870f-44f4-9f15-aa4b15d86a12\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.900251 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/23cf88b0-870f-44f4-9f15-aa4b15d86a12-dev\") pod \"cinder-volume-volume1-0\" (UID: \"23cf88b0-870f-44f4-9f15-aa4b15d86a12\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.900289 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/23cf88b0-870f-44f4-9f15-aa4b15d86a12-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"23cf88b0-870f-44f4-9f15-aa4b15d86a12\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.900336 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/23cf88b0-870f-44f4-9f15-aa4b15d86a12-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"23cf88b0-870f-44f4-9f15-aa4b15d86a12\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.900371 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/23cf88b0-870f-44f4-9f15-aa4b15d86a12-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"23cf88b0-870f-44f4-9f15-aa4b15d86a12\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.900399 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/23cf88b0-870f-44f4-9f15-aa4b15d86a12-sys\") pod \"cinder-volume-volume1-0\" (UID: \"23cf88b0-870f-44f4-9f15-aa4b15d86a12\") " 
pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.906622 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/61360ddf-d4ef-4328-add9-ac6c2d95d563-scripts\") pod \"cinder-scheduler-0\" (UID: \"61360ddf-d4ef-4328-add9-ac6c2d95d563\") " pod="openstack/cinder-scheduler-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.906746 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/23cf88b0-870f-44f4-9f15-aa4b15d86a12-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"23cf88b0-870f-44f4-9f15-aa4b15d86a12\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.908459 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/23cf88b0-870f-44f4-9f15-aa4b15d86a12-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"23cf88b0-870f-44f4-9f15-aa4b15d86a12\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.908850 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/61360ddf-d4ef-4328-add9-ac6c2d95d563-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"61360ddf-d4ef-4328-add9-ac6c2d95d563\") " pod="openstack/cinder-scheduler-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.908951 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/61360ddf-d4ef-4328-add9-ac6c2d95d563-config-data\") pod \"cinder-scheduler-0\" (UID: \"61360ddf-d4ef-4328-add9-ac6c2d95d563\") " pod="openstack/cinder-scheduler-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.909267 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23cf88b0-870f-44f4-9f15-aa4b15d86a12-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"23cf88b0-870f-44f4-9f15-aa4b15d86a12\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.909734 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23cf88b0-870f-44f4-9f15-aa4b15d86a12-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"23cf88b0-870f-44f4-9f15-aa4b15d86a12\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.911152 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23cf88b0-870f-44f4-9f15-aa4b15d86a12-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"23cf88b0-870f-44f4-9f15-aa4b15d86a12\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.919243 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61360ddf-d4ef-4328-add9-ac6c2d95d563-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"61360ddf-d4ef-4328-add9-ac6c2d95d563\") " pod="openstack/cinder-scheduler-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.922025 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wptdd\" (UniqueName: \"kubernetes.io/projected/23cf88b0-870f-44f4-9f15-aa4b15d86a12-kube-api-access-wptdd\") pod 
\"cinder-volume-volume1-0\" (UID: \"23cf88b0-870f-44f4-9f15-aa4b15d86a12\") " pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:44 crc kubenswrapper[4728]: I1205 11:30:44.927178 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8j796\" (UniqueName: \"kubernetes.io/projected/61360ddf-d4ef-4328-add9-ac6c2d95d563-kube-api-access-8j796\") pod \"cinder-scheduler-0\" (UID: \"61360ddf-d4ef-4328-add9-ac6c2d95d563\") " pod="openstack/cinder-scheduler-0" Dec 05 11:30:45 crc kubenswrapper[4728]: I1205 11:30:45.017599 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 11:30:45 crc kubenswrapper[4728]: I1205 11:30:45.057817 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:45 crc kubenswrapper[4728]: I1205 11:30:45.528186 4728 generic.go:334] "Generic (PLEG): container finished" podID="537c7276-c2c9-4427-9b2b-5e835e3bc2d7" containerID="8ef8f65ad94c428c88af66587e1fa39c7c9a3e85582c08325973ab5fff66cdaa" exitCode=0 Dec 05 11:30:45 crc kubenswrapper[4728]: I1205 11:30:45.528432 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-zbbv6" event={"ID":"537c7276-c2c9-4427-9b2b-5e835e3bc2d7","Type":"ContainerDied","Data":"8ef8f65ad94c428c88af66587e1fa39c7c9a3e85582c08325973ab5fff66cdaa"} Dec 05 11:30:45 crc kubenswrapper[4728]: I1205 11:30:45.533488 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 05 11:30:45 crc kubenswrapper[4728]: I1205 11:30:45.534168 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"6860f6fe-8127-4cbd-af2d-7e5e0e4ed001","Type":"ContainerStarted","Data":"3080f58a0c94b7fc9c8d53a3fda743de03811019b55e9854296924b405ff1449"} Dec 05 11:30:45 crc kubenswrapper[4728]: I1205 11:30:45.537517 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 11:30:45 crc kubenswrapper[4728]: I1205 11:30:45.658046 4728 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="e1c36796-b303-4cf6-908d-d41c2a16f5b6" podUID="6860f6fe-8127-4cbd-af2d-7e5e0e4ed001" Dec 05 11:30:45 crc kubenswrapper[4728]: I1205 11:30:45.731593 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume1-0"] Dec 05 11:30:46 crc kubenswrapper[4728]: I1205 11:30:46.382238 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3f719999-317e-4c1f-9d0d-53c9d1277e14" path="/var/lib/kubelet/pods/3f719999-317e-4c1f-9d0d-53c9d1277e14/volumes" Dec 05 11:30:46 crc kubenswrapper[4728]: I1205 11:30:46.384133 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44b0cb2c-149f-4a9d-a494-cb9542391b4f" path="/var/lib/kubelet/pods/44b0cb2c-149f-4a9d-a494-cb9542391b4f/volumes" Dec 05 11:30:46 crc kubenswrapper[4728]: I1205 11:30:46.385219 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e1c36796-b303-4cf6-908d-d41c2a16f5b6" path="/var/lib/kubelet/pods/e1c36796-b303-4cf6-908d-d41c2a16f5b6/volumes" Dec 05 11:30:46 crc kubenswrapper[4728]: I1205 11:30:46.548275 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"61360ddf-d4ef-4328-add9-ac6c2d95d563","Type":"ContainerStarted","Data":"5304268afebc08705aa9cfd1bda06a9b00609659761bd212962c7f65f2b6af55"} Dec 05 11:30:46 crc 
kubenswrapper[4728]: I1205 11:30:46.548323 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"61360ddf-d4ef-4328-add9-ac6c2d95d563","Type":"ContainerStarted","Data":"22f2e83c5cc6912d29457fd04c15122006cb94ac70ebe507876e892db9025d5e"} Dec 05 11:30:46 crc kubenswrapper[4728]: I1205 11:30:46.554903 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"23cf88b0-870f-44f4-9f15-aa4b15d86a12","Type":"ContainerStarted","Data":"1f5530b797fe2a4405f4ecc3fa9f7d6e160e9018fd7e5e2a237e5e3805089e27"} Dec 05 11:30:46 crc kubenswrapper[4728]: I1205 11:30:46.554976 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"23cf88b0-870f-44f4-9f15-aa4b15d86a12","Type":"ContainerStarted","Data":"ddf77933502556e323ee94d9e65f5707b533d37ddad3f1a7a273456da8458439"} Dec 05 11:30:46 crc kubenswrapper[4728]: I1205 11:30:46.554989 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"23cf88b0-870f-44f4-9f15-aa4b15d86a12","Type":"ContainerStarted","Data":"ad79055d0c5ee41e9e793335663396dba4c5b13b5231dc494153fb8ff58c79b2"} Dec 05 11:30:46 crc kubenswrapper[4728]: I1205 11:30:46.584559 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-volume-volume1-0" podStartSLOduration=2.584538495 podStartE2EDuration="2.584538495s" podCreationTimestamp="2025-12-05 11:30:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:30:46.578660424 +0000 UTC m=+1380.720783137" watchObservedRunningTime="2025-12-05 11:30:46.584538495 +0000 UTC m=+1380.726661188" Dec 05 11:30:46 crc kubenswrapper[4728]: I1205 11:30:46.714499 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-backup-0" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.103305 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-zbbv6" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.147878 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-6bf4577867-xpjkr"] Dec 05 11:30:47 crc kubenswrapper[4728]: E1205 11:30:47.148362 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="537c7276-c2c9-4427-9b2b-5e835e3bc2d7" containerName="manila-db-sync" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.148382 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="537c7276-c2c9-4427-9b2b-5e835e3bc2d7" containerName="manila-db-sync" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.148643 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="537c7276-c2c9-4427-9b2b-5e835e3bc2d7" containerName="manila-db-sync" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.149862 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-6bf4577867-xpjkr" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.157405 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.157686 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.158822 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.168621 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/537c7276-c2c9-4427-9b2b-5e835e3bc2d7-job-config-data\") pod \"537c7276-c2c9-4427-9b2b-5e835e3bc2d7\" (UID: \"537c7276-c2c9-4427-9b2b-5e835e3bc2d7\") " Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.169859 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/537c7276-c2c9-4427-9b2b-5e835e3bc2d7-combined-ca-bundle\") pod \"537c7276-c2c9-4427-9b2b-5e835e3bc2d7\" (UID: \"537c7276-c2c9-4427-9b2b-5e835e3bc2d7\") " Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.169958 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-td8b6\" (UniqueName: \"kubernetes.io/projected/537c7276-c2c9-4427-9b2b-5e835e3bc2d7-kube-api-access-td8b6\") pod \"537c7276-c2c9-4427-9b2b-5e835e3bc2d7\" (UID: \"537c7276-c2c9-4427-9b2b-5e835e3bc2d7\") " Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.170158 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/537c7276-c2c9-4427-9b2b-5e835e3bc2d7-config-data\") pod \"537c7276-c2c9-4427-9b2b-5e835e3bc2d7\" (UID: \"537c7276-c2c9-4427-9b2b-5e835e3bc2d7\") " Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.195365 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/537c7276-c2c9-4427-9b2b-5e835e3bc2d7-kube-api-access-td8b6" (OuterVolumeSpecName: "kube-api-access-td8b6") pod "537c7276-c2c9-4427-9b2b-5e835e3bc2d7" (UID: "537c7276-c2c9-4427-9b2b-5e835e3bc2d7"). InnerVolumeSpecName "kube-api-access-td8b6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.202698 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-6bf4577867-xpjkr"] Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.221043 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/537c7276-c2c9-4427-9b2b-5e835e3bc2d7-config-data" (OuterVolumeSpecName: "config-data") pod "537c7276-c2c9-4427-9b2b-5e835e3bc2d7" (UID: "537c7276-c2c9-4427-9b2b-5e835e3bc2d7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.223837 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/537c7276-c2c9-4427-9b2b-5e835e3bc2d7-job-config-data" (OuterVolumeSpecName: "job-config-data") pod "537c7276-c2c9-4427-9b2b-5e835e3bc2d7" (UID: "537c7276-c2c9-4427-9b2b-5e835e3bc2d7"). InnerVolumeSpecName "job-config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.261224 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.272814 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9e762b75-33c7-464f-a8a9-316b5209b2b3-log-httpd\") pod \"swift-proxy-6bf4577867-xpjkr\" (UID: \"9e762b75-33c7-464f-a8a9-316b5209b2b3\") " pod="openstack/swift-proxy-6bf4577867-xpjkr" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.272860 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9e762b75-33c7-464f-a8a9-316b5209b2b3-public-tls-certs\") pod \"swift-proxy-6bf4577867-xpjkr\" (UID: \"9e762b75-33c7-464f-a8a9-316b5209b2b3\") " pod="openstack/swift-proxy-6bf4577867-xpjkr" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.272892 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e762b75-33c7-464f-a8a9-316b5209b2b3-combined-ca-bundle\") pod \"swift-proxy-6bf4577867-xpjkr\" (UID: \"9e762b75-33c7-464f-a8a9-316b5209b2b3\") " pod="openstack/swift-proxy-6bf4577867-xpjkr" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.272918 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-svlt4\" (UniqueName: \"kubernetes.io/projected/9e762b75-33c7-464f-a8a9-316b5209b2b3-kube-api-access-svlt4\") pod \"swift-proxy-6bf4577867-xpjkr\" (UID: \"9e762b75-33c7-464f-a8a9-316b5209b2b3\") " pod="openstack/swift-proxy-6bf4577867-xpjkr" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.272935 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/9e762b75-33c7-464f-a8a9-316b5209b2b3-etc-swift\") pod \"swift-proxy-6bf4577867-xpjkr\" (UID: \"9e762b75-33c7-464f-a8a9-316b5209b2b3\") " pod="openstack/swift-proxy-6bf4577867-xpjkr" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.273001 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e762b75-33c7-464f-a8a9-316b5209b2b3-config-data\") pod \"swift-proxy-6bf4577867-xpjkr\" (UID: \"9e762b75-33c7-464f-a8a9-316b5209b2b3\") " pod="openstack/swift-proxy-6bf4577867-xpjkr" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.273045 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9e762b75-33c7-464f-a8a9-316b5209b2b3-internal-tls-certs\") pod \"swift-proxy-6bf4577867-xpjkr\" (UID: \"9e762b75-33c7-464f-a8a9-316b5209b2b3\") " pod="openstack/swift-proxy-6bf4577867-xpjkr" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.273065 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9e762b75-33c7-464f-a8a9-316b5209b2b3-run-httpd\") pod \"swift-proxy-6bf4577867-xpjkr\" (UID: \"9e762b75-33c7-464f-a8a9-316b5209b2b3\") " pod="openstack/swift-proxy-6bf4577867-xpjkr" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.273108 4728 
reconciler_common.go:293] "Volume detached for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/537c7276-c2c9-4427-9b2b-5e835e3bc2d7-job-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.273118 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-td8b6\" (UniqueName: \"kubernetes.io/projected/537c7276-c2c9-4427-9b2b-5e835e3bc2d7-kube-api-access-td8b6\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.273129 4728 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/537c7276-c2c9-4427-9b2b-5e835e3bc2d7-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.288223 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/537c7276-c2c9-4427-9b2b-5e835e3bc2d7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "537c7276-c2c9-4427-9b2b-5e835e3bc2d7" (UID: "537c7276-c2c9-4427-9b2b-5e835e3bc2d7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.374757 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e762b75-33c7-464f-a8a9-316b5209b2b3-config-data\") pod \"swift-proxy-6bf4577867-xpjkr\" (UID: \"9e762b75-33c7-464f-a8a9-316b5209b2b3\") " pod="openstack/swift-proxy-6bf4577867-xpjkr" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.375056 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9e762b75-33c7-464f-a8a9-316b5209b2b3-internal-tls-certs\") pod \"swift-proxy-6bf4577867-xpjkr\" (UID: \"9e762b75-33c7-464f-a8a9-316b5209b2b3\") " pod="openstack/swift-proxy-6bf4577867-xpjkr" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.375163 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9e762b75-33c7-464f-a8a9-316b5209b2b3-run-httpd\") pod \"swift-proxy-6bf4577867-xpjkr\" (UID: \"9e762b75-33c7-464f-a8a9-316b5209b2b3\") " pod="openstack/swift-proxy-6bf4577867-xpjkr" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.375283 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9e762b75-33c7-464f-a8a9-316b5209b2b3-log-httpd\") pod \"swift-proxy-6bf4577867-xpjkr\" (UID: \"9e762b75-33c7-464f-a8a9-316b5209b2b3\") " pod="openstack/swift-proxy-6bf4577867-xpjkr" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.375357 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9e762b75-33c7-464f-a8a9-316b5209b2b3-public-tls-certs\") pod \"swift-proxy-6bf4577867-xpjkr\" (UID: \"9e762b75-33c7-464f-a8a9-316b5209b2b3\") " pod="openstack/swift-proxy-6bf4577867-xpjkr" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.375420 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e762b75-33c7-464f-a8a9-316b5209b2b3-combined-ca-bundle\") pod \"swift-proxy-6bf4577867-xpjkr\" (UID: \"9e762b75-33c7-464f-a8a9-316b5209b2b3\") " pod="openstack/swift-proxy-6bf4577867-xpjkr" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 
11:30:47.375490 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-svlt4\" (UniqueName: \"kubernetes.io/projected/9e762b75-33c7-464f-a8a9-316b5209b2b3-kube-api-access-svlt4\") pod \"swift-proxy-6bf4577867-xpjkr\" (UID: \"9e762b75-33c7-464f-a8a9-316b5209b2b3\") " pod="openstack/swift-proxy-6bf4577867-xpjkr" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.375567 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/9e762b75-33c7-464f-a8a9-316b5209b2b3-etc-swift\") pod \"swift-proxy-6bf4577867-xpjkr\" (UID: \"9e762b75-33c7-464f-a8a9-316b5209b2b3\") " pod="openstack/swift-proxy-6bf4577867-xpjkr" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.375758 4728 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/537c7276-c2c9-4427-9b2b-5e835e3bc2d7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.386727 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9e762b75-33c7-464f-a8a9-316b5209b2b3-run-httpd\") pod \"swift-proxy-6bf4577867-xpjkr\" (UID: \"9e762b75-33c7-464f-a8a9-316b5209b2b3\") " pod="openstack/swift-proxy-6bf4577867-xpjkr" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.388057 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9e762b75-33c7-464f-a8a9-316b5209b2b3-log-httpd\") pod \"swift-proxy-6bf4577867-xpjkr\" (UID: \"9e762b75-33c7-464f-a8a9-316b5209b2b3\") " pod="openstack/swift-proxy-6bf4577867-xpjkr" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.394611 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e762b75-33c7-464f-a8a9-316b5209b2b3-config-data\") pod \"swift-proxy-6bf4577867-xpjkr\" (UID: \"9e762b75-33c7-464f-a8a9-316b5209b2b3\") " pod="openstack/swift-proxy-6bf4577867-xpjkr" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.397541 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e762b75-33c7-464f-a8a9-316b5209b2b3-combined-ca-bundle\") pod \"swift-proxy-6bf4577867-xpjkr\" (UID: \"9e762b75-33c7-464f-a8a9-316b5209b2b3\") " pod="openstack/swift-proxy-6bf4577867-xpjkr" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.408429 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9e762b75-33c7-464f-a8a9-316b5209b2b3-public-tls-certs\") pod \"swift-proxy-6bf4577867-xpjkr\" (UID: \"9e762b75-33c7-464f-a8a9-316b5209b2b3\") " pod="openstack/swift-proxy-6bf4577867-xpjkr" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.409559 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/9e762b75-33c7-464f-a8a9-316b5209b2b3-etc-swift\") pod \"swift-proxy-6bf4577867-xpjkr\" (UID: \"9e762b75-33c7-464f-a8a9-316b5209b2b3\") " pod="openstack/swift-proxy-6bf4577867-xpjkr" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.410065 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9e762b75-33c7-464f-a8a9-316b5209b2b3-internal-tls-certs\") pod \"swift-proxy-6bf4577867-xpjkr\" 
(UID: \"9e762b75-33c7-464f-a8a9-316b5209b2b3\") " pod="openstack/swift-proxy-6bf4577867-xpjkr" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.414871 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-svlt4\" (UniqueName: \"kubernetes.io/projected/9e762b75-33c7-464f-a8a9-316b5209b2b3-kube-api-access-svlt4\") pod \"swift-proxy-6bf4577867-xpjkr\" (UID: \"9e762b75-33c7-464f-a8a9-316b5209b2b3\") " pod="openstack/swift-proxy-6bf4577867-xpjkr" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.498969 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-6bf4577867-xpjkr" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.579099 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-zbbv6" event={"ID":"537c7276-c2c9-4427-9b2b-5e835e3bc2d7","Type":"ContainerDied","Data":"db6d68d8ddfc5b058bd6f7baf5a35db447bb253ee5fa3818568176ef8eb0b1bc"} Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.579176 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="db6d68d8ddfc5b058bd6f7baf5a35db447bb253ee5fa3818568176ef8eb0b1bc" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.579244 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-zbbv6" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.588382 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"61360ddf-d4ef-4328-add9-ac6c2d95d563","Type":"ContainerStarted","Data":"9449eb5af608dcf0ac77246d07a779fbe67556d3276c0f283ce8f4bbdd82348e"} Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.649211 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.649193435 podStartE2EDuration="3.649193435s" podCreationTimestamp="2025-12-05 11:30:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:30:47.647089407 +0000 UTC m=+1381.789212120" watchObservedRunningTime="2025-12-05 11:30:47.649193435 +0000 UTC m=+1381.791316128" Dec 05 11:30:47 crc kubenswrapper[4728]: E1205 11:30:47.688887 4728 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod537c7276_c2c9_4427_9b2b_5e835e3bc2d7.slice\": RecentStats: unable to find data in memory cache]" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.774854 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-scheduler-0"] Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.776549 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-scheduler-0" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.783056 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-manila-dockercfg-75ffx" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.783281 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scripts" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.783842 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scheduler-config-data" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.789846 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-config-data" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.794362 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.828873 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-share-share1-0"] Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.830480 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-share-share1-0" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.832526 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-share-share1-config-data" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.858965 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.909319 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a44b6cec-c64e-4a88-be5a-d51a1f6fb66e-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"a44b6cec-c64e-4a88-be5a-d51a1f6fb66e\") " pod="openstack/manila-scheduler-0" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.909383 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rq2tt\" (UniqueName: \"kubernetes.io/projected/a44b6cec-c64e-4a88-be5a-d51a1f6fb66e-kube-api-access-rq2tt\") pod \"manila-scheduler-0\" (UID: \"a44b6cec-c64e-4a88-be5a-d51a1f6fb66e\") " pod="openstack/manila-scheduler-0" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.909448 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/d203e1b2-68bf-458f-b1de-b590da34a559-ceph\") pod \"manila-share-share1-0\" (UID: \"d203e1b2-68bf-458f-b1de-b590da34a559\") " pod="openstack/manila-share-share1-0" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.917742 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a44b6cec-c64e-4a88-be5a-d51a1f6fb66e-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"a44b6cec-c64e-4a88-be5a-d51a1f6fb66e\") " pod="openstack/manila-scheduler-0" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.917840 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/d203e1b2-68bf-458f-b1de-b590da34a559-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"d203e1b2-68bf-458f-b1de-b590da34a559\") " pod="openstack/manila-share-share1-0" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 
11:30:47.917861 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a44b6cec-c64e-4a88-be5a-d51a1f6fb66e-scripts\") pod \"manila-scheduler-0\" (UID: \"a44b6cec-c64e-4a88-be5a-d51a1f6fb66e\") " pod="openstack/manila-scheduler-0" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.917949 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a44b6cec-c64e-4a88-be5a-d51a1f6fb66e-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"a44b6cec-c64e-4a88-be5a-d51a1f6fb66e\") " pod="openstack/manila-scheduler-0" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.917984 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d203e1b2-68bf-458f-b1de-b590da34a559-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"d203e1b2-68bf-458f-b1de-b590da34a559\") " pod="openstack/manila-share-share1-0" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.918132 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d203e1b2-68bf-458f-b1de-b590da34a559-config-data\") pod \"manila-share-share1-0\" (UID: \"d203e1b2-68bf-458f-b1de-b590da34a559\") " pod="openstack/manila-share-share1-0" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.918178 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a44b6cec-c64e-4a88-be5a-d51a1f6fb66e-config-data\") pod \"manila-scheduler-0\" (UID: \"a44b6cec-c64e-4a88-be5a-d51a1f6fb66e\") " pod="openstack/manila-scheduler-0" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.918214 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d203e1b2-68bf-458f-b1de-b590da34a559-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"d203e1b2-68bf-458f-b1de-b590da34a559\") " pod="openstack/manila-share-share1-0" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.918262 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d203e1b2-68bf-458f-b1de-b590da34a559-scripts\") pod \"manila-share-share1-0\" (UID: \"d203e1b2-68bf-458f-b1de-b590da34a559\") " pod="openstack/manila-share-share1-0" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.918292 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-flzfl\" (UniqueName: \"kubernetes.io/projected/d203e1b2-68bf-458f-b1de-b590da34a559-kube-api-access-flzfl\") pod \"manila-share-share1-0\" (UID: \"d203e1b2-68bf-458f-b1de-b590da34a559\") " pod="openstack/manila-share-share1-0" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.918400 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d203e1b2-68bf-458f-b1de-b590da34a559-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"d203e1b2-68bf-458f-b1de-b590da34a559\") " pod="openstack/manila-share-share1-0" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.929559 4728 kubelet.go:2421] "SyncLoop 
ADD" source="api" pods=["openstack/dnsmasq-dns-56696ff475-f9ztl"] Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.940545 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56696ff475-f9ztl" Dec 05 11:30:47 crc kubenswrapper[4728]: I1205 11:30:47.973639 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-56696ff475-f9ztl"] Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.022039 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d203e1b2-68bf-458f-b1de-b590da34a559-scripts\") pod \"manila-share-share1-0\" (UID: \"d203e1b2-68bf-458f-b1de-b590da34a559\") " pod="openstack/manila-share-share1-0" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.022086 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-flzfl\" (UniqueName: \"kubernetes.io/projected/d203e1b2-68bf-458f-b1de-b590da34a559-kube-api-access-flzfl\") pod \"manila-share-share1-0\" (UID: \"d203e1b2-68bf-458f-b1de-b590da34a559\") " pod="openstack/manila-share-share1-0" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.022121 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f5xfg\" (UniqueName: \"kubernetes.io/projected/e01300fa-c016-435f-9d98-325203486428-kube-api-access-f5xfg\") pod \"dnsmasq-dns-56696ff475-f9ztl\" (UID: \"e01300fa-c016-435f-9d98-325203486428\") " pod="openstack/dnsmasq-dns-56696ff475-f9ztl" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.022146 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e01300fa-c016-435f-9d98-325203486428-ovsdbserver-nb\") pod \"dnsmasq-dns-56696ff475-f9ztl\" (UID: \"e01300fa-c016-435f-9d98-325203486428\") " pod="openstack/dnsmasq-dns-56696ff475-f9ztl" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.022162 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e01300fa-c016-435f-9d98-325203486428-dns-swift-storage-0\") pod \"dnsmasq-dns-56696ff475-f9ztl\" (UID: \"e01300fa-c016-435f-9d98-325203486428\") " pod="openstack/dnsmasq-dns-56696ff475-f9ztl" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.022191 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d203e1b2-68bf-458f-b1de-b590da34a559-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"d203e1b2-68bf-458f-b1de-b590da34a559\") " pod="openstack/manila-share-share1-0" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.022222 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a44b6cec-c64e-4a88-be5a-d51a1f6fb66e-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"a44b6cec-c64e-4a88-be5a-d51a1f6fb66e\") " pod="openstack/manila-scheduler-0" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.022243 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e01300fa-c016-435f-9d98-325203486428-config\") pod \"dnsmasq-dns-56696ff475-f9ztl\" (UID: \"e01300fa-c016-435f-9d98-325203486428\") " pod="openstack/dnsmasq-dns-56696ff475-f9ztl" Dec 
05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.022264 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rq2tt\" (UniqueName: \"kubernetes.io/projected/a44b6cec-c64e-4a88-be5a-d51a1f6fb66e-kube-api-access-rq2tt\") pod \"manila-scheduler-0\" (UID: \"a44b6cec-c64e-4a88-be5a-d51a1f6fb66e\") " pod="openstack/manila-scheduler-0" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.022310 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/d203e1b2-68bf-458f-b1de-b590da34a559-ceph\") pod \"manila-share-share1-0\" (UID: \"d203e1b2-68bf-458f-b1de-b590da34a559\") " pod="openstack/manila-share-share1-0" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.022346 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a44b6cec-c64e-4a88-be5a-d51a1f6fb66e-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"a44b6cec-c64e-4a88-be5a-d51a1f6fb66e\") " pod="openstack/manila-scheduler-0" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.022371 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/d203e1b2-68bf-458f-b1de-b590da34a559-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"d203e1b2-68bf-458f-b1de-b590da34a559\") " pod="openstack/manila-share-share1-0" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.022386 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a44b6cec-c64e-4a88-be5a-d51a1f6fb66e-scripts\") pod \"manila-scheduler-0\" (UID: \"a44b6cec-c64e-4a88-be5a-d51a1f6fb66e\") " pod="openstack/manila-scheduler-0" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.022399 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a44b6cec-c64e-4a88-be5a-d51a1f6fb66e-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"a44b6cec-c64e-4a88-be5a-d51a1f6fb66e\") " pod="openstack/manila-scheduler-0" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.022413 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e01300fa-c016-435f-9d98-325203486428-dns-svc\") pod \"dnsmasq-dns-56696ff475-f9ztl\" (UID: \"e01300fa-c016-435f-9d98-325203486428\") " pod="openstack/dnsmasq-dns-56696ff475-f9ztl" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.022431 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d203e1b2-68bf-458f-b1de-b590da34a559-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"d203e1b2-68bf-458f-b1de-b590da34a559\") " pod="openstack/manila-share-share1-0" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.022454 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e01300fa-c016-435f-9d98-325203486428-ovsdbserver-sb\") pod \"dnsmasq-dns-56696ff475-f9ztl\" (UID: \"e01300fa-c016-435f-9d98-325203486428\") " pod="openstack/dnsmasq-dns-56696ff475-f9ztl" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.022485 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/d203e1b2-68bf-458f-b1de-b590da34a559-config-data\") pod \"manila-share-share1-0\" (UID: \"d203e1b2-68bf-458f-b1de-b590da34a559\") " pod="openstack/manila-share-share1-0" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.022506 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a44b6cec-c64e-4a88-be5a-d51a1f6fb66e-config-data\") pod \"manila-scheduler-0\" (UID: \"a44b6cec-c64e-4a88-be5a-d51a1f6fb66e\") " pod="openstack/manila-scheduler-0" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.022525 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d203e1b2-68bf-458f-b1de-b590da34a559-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"d203e1b2-68bf-458f-b1de-b590da34a559\") " pod="openstack/manila-share-share1-0" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.023531 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a44b6cec-c64e-4a88-be5a-d51a1f6fb66e-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"a44b6cec-c64e-4a88-be5a-d51a1f6fb66e\") " pod="openstack/manila-scheduler-0" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.023649 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/d203e1b2-68bf-458f-b1de-b590da34a559-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"d203e1b2-68bf-458f-b1de-b590da34a559\") " pod="openstack/manila-share-share1-0" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.032829 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/d203e1b2-68bf-458f-b1de-b590da34a559-ceph\") pod \"manila-share-share1-0\" (UID: \"d203e1b2-68bf-458f-b1de-b590da34a559\") " pod="openstack/manila-share-share1-0" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.033467 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a44b6cec-c64e-4a88-be5a-d51a1f6fb66e-scripts\") pod \"manila-scheduler-0\" (UID: \"a44b6cec-c64e-4a88-be5a-d51a1f6fb66e\") " pod="openstack/manila-scheduler-0" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.042407 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-api-0"] Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.044387 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-api-0" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.048216 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-api-config-data" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.049686 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d203e1b2-68bf-458f-b1de-b590da34a559-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"d203e1b2-68bf-458f-b1de-b590da34a559\") " pod="openstack/manila-share-share1-0" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.049879 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a44b6cec-c64e-4a88-be5a-d51a1f6fb66e-config-data\") pod \"manila-scheduler-0\" (UID: \"a44b6cec-c64e-4a88-be5a-d51a1f6fb66e\") " pod="openstack/manila-scheduler-0" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.062104 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.068442 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d203e1b2-68bf-458f-b1de-b590da34a559-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"d203e1b2-68bf-458f-b1de-b590da34a559\") " pod="openstack/manila-share-share1-0" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.070354 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d203e1b2-68bf-458f-b1de-b590da34a559-config-data\") pod \"manila-share-share1-0\" (UID: \"d203e1b2-68bf-458f-b1de-b590da34a559\") " pod="openstack/manila-share-share1-0" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.070655 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d203e1b2-68bf-458f-b1de-b590da34a559-scripts\") pod \"manila-share-share1-0\" (UID: \"d203e1b2-68bf-458f-b1de-b590da34a559\") " pod="openstack/manila-share-share1-0" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.070679 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a44b6cec-c64e-4a88-be5a-d51a1f6fb66e-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"a44b6cec-c64e-4a88-be5a-d51a1f6fb66e\") " pod="openstack/manila-scheduler-0" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.074483 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rq2tt\" (UniqueName: \"kubernetes.io/projected/a44b6cec-c64e-4a88-be5a-d51a1f6fb66e-kube-api-access-rq2tt\") pod \"manila-scheduler-0\" (UID: \"a44b6cec-c64e-4a88-be5a-d51a1f6fb66e\") " pod="openstack/manila-scheduler-0" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.074581 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a44b6cec-c64e-4a88-be5a-d51a1f6fb66e-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"a44b6cec-c64e-4a88-be5a-d51a1f6fb66e\") " pod="openstack/manila-scheduler-0" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.074670 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-flzfl\" (UniqueName: \"kubernetes.io/projected/d203e1b2-68bf-458f-b1de-b590da34a559-kube-api-access-flzfl\") pod 
\"manila-share-share1-0\" (UID: \"d203e1b2-68bf-458f-b1de-b590da34a559\") " pod="openstack/manila-share-share1-0" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.074779 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d203e1b2-68bf-458f-b1de-b590da34a559-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"d203e1b2-68bf-458f-b1de-b590da34a559\") " pod="openstack/manila-share-share1-0" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.103933 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-scheduler-0" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.124275 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/579f6107-57c9-4a44-b577-5a7f4b753366-etc-machine-id\") pod \"manila-api-0\" (UID: \"579f6107-57c9-4a44-b577-5a7f4b753366\") " pod="openstack/manila-api-0" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.124607 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/579f6107-57c9-4a44-b577-5a7f4b753366-scripts\") pod \"manila-api-0\" (UID: \"579f6107-57c9-4a44-b577-5a7f4b753366\") " pod="openstack/manila-api-0" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.124642 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f5xfg\" (UniqueName: \"kubernetes.io/projected/e01300fa-c016-435f-9d98-325203486428-kube-api-access-f5xfg\") pod \"dnsmasq-dns-56696ff475-f9ztl\" (UID: \"e01300fa-c016-435f-9d98-325203486428\") " pod="openstack/dnsmasq-dns-56696ff475-f9ztl" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.124668 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e01300fa-c016-435f-9d98-325203486428-ovsdbserver-nb\") pod \"dnsmasq-dns-56696ff475-f9ztl\" (UID: \"e01300fa-c016-435f-9d98-325203486428\") " pod="openstack/dnsmasq-dns-56696ff475-f9ztl" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.124686 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e01300fa-c016-435f-9d98-325203486428-dns-swift-storage-0\") pod \"dnsmasq-dns-56696ff475-f9ztl\" (UID: \"e01300fa-c016-435f-9d98-325203486428\") " pod="openstack/dnsmasq-dns-56696ff475-f9ztl" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.124735 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e01300fa-c016-435f-9d98-325203486428-config\") pod \"dnsmasq-dns-56696ff475-f9ztl\" (UID: \"e01300fa-c016-435f-9d98-325203486428\") " pod="openstack/dnsmasq-dns-56696ff475-f9ztl" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.124770 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/579f6107-57c9-4a44-b577-5a7f4b753366-config-data\") pod \"manila-api-0\" (UID: \"579f6107-57c9-4a44-b577-5a7f4b753366\") " pod="openstack/manila-api-0" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.124790 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/579f6107-57c9-4a44-b577-5a7f4b753366-logs\") pod \"manila-api-0\" (UID: \"579f6107-57c9-4a44-b577-5a7f4b753366\") " pod="openstack/manila-api-0" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.124852 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2slb8\" (UniqueName: \"kubernetes.io/projected/579f6107-57c9-4a44-b577-5a7f4b753366-kube-api-access-2slb8\") pod \"manila-api-0\" (UID: \"579f6107-57c9-4a44-b577-5a7f4b753366\") " pod="openstack/manila-api-0" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.124869 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/579f6107-57c9-4a44-b577-5a7f4b753366-config-data-custom\") pod \"manila-api-0\" (UID: \"579f6107-57c9-4a44-b577-5a7f4b753366\") " pod="openstack/manila-api-0" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.124900 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/579f6107-57c9-4a44-b577-5a7f4b753366-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"579f6107-57c9-4a44-b577-5a7f4b753366\") " pod="openstack/manila-api-0" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.124930 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e01300fa-c016-435f-9d98-325203486428-dns-svc\") pod \"dnsmasq-dns-56696ff475-f9ztl\" (UID: \"e01300fa-c016-435f-9d98-325203486428\") " pod="openstack/dnsmasq-dns-56696ff475-f9ztl" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.124961 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e01300fa-c016-435f-9d98-325203486428-ovsdbserver-sb\") pod \"dnsmasq-dns-56696ff475-f9ztl\" (UID: \"e01300fa-c016-435f-9d98-325203486428\") " pod="openstack/dnsmasq-dns-56696ff475-f9ztl" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.125793 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e01300fa-c016-435f-9d98-325203486428-ovsdbserver-sb\") pod \"dnsmasq-dns-56696ff475-f9ztl\" (UID: \"e01300fa-c016-435f-9d98-325203486428\") " pod="openstack/dnsmasq-dns-56696ff475-f9ztl" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.127499 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e01300fa-c016-435f-9d98-325203486428-ovsdbserver-nb\") pod \"dnsmasq-dns-56696ff475-f9ztl\" (UID: \"e01300fa-c016-435f-9d98-325203486428\") " pod="openstack/dnsmasq-dns-56696ff475-f9ztl" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.128119 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e01300fa-c016-435f-9d98-325203486428-dns-swift-storage-0\") pod \"dnsmasq-dns-56696ff475-f9ztl\" (UID: \"e01300fa-c016-435f-9d98-325203486428\") " pod="openstack/dnsmasq-dns-56696ff475-f9ztl" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.128643 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e01300fa-c016-435f-9d98-325203486428-config\") pod \"dnsmasq-dns-56696ff475-f9ztl\" (UID: 
\"e01300fa-c016-435f-9d98-325203486428\") " pod="openstack/dnsmasq-dns-56696ff475-f9ztl" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.129223 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e01300fa-c016-435f-9d98-325203486428-dns-svc\") pod \"dnsmasq-dns-56696ff475-f9ztl\" (UID: \"e01300fa-c016-435f-9d98-325203486428\") " pod="openstack/dnsmasq-dns-56696ff475-f9ztl" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.155764 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f5xfg\" (UniqueName: \"kubernetes.io/projected/e01300fa-c016-435f-9d98-325203486428-kube-api-access-f5xfg\") pod \"dnsmasq-dns-56696ff475-f9ztl\" (UID: \"e01300fa-c016-435f-9d98-325203486428\") " pod="openstack/dnsmasq-dns-56696ff475-f9ztl" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.161989 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-share-share1-0" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.191712 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.191988 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f" containerName="ceilometer-central-agent" containerID="cri-o://9abebf2f45df1f6cb123002e4ef7f9188cc958011fd013fb816cf7d5beae1565" gracePeriod=30 Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.192625 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f" containerName="proxy-httpd" containerID="cri-o://e47102520c329e24dd4df17569516824f4fdfaa6fe2561775d2ca6e43705cd57" gracePeriod=30 Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.192689 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f" containerName="sg-core" containerID="cri-o://7f41e6e21f1e69685f975c1ac3340149f69d19beace7a8567c1beb0451d047c7" gracePeriod=30 Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.192722 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f" containerName="ceilometer-notification-agent" containerID="cri-o://129b9328641f752d80d23c6938388874ff7c60006f7a0efdc7c1d0b697ae5cfb" gracePeriod=30 Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.226367 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2slb8\" (UniqueName: \"kubernetes.io/projected/579f6107-57c9-4a44-b577-5a7f4b753366-kube-api-access-2slb8\") pod \"manila-api-0\" (UID: \"579f6107-57c9-4a44-b577-5a7f4b753366\") " pod="openstack/manila-api-0" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.226413 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/579f6107-57c9-4a44-b577-5a7f4b753366-config-data-custom\") pod \"manila-api-0\" (UID: \"579f6107-57c9-4a44-b577-5a7f4b753366\") " pod="openstack/manila-api-0" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.226450 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/579f6107-57c9-4a44-b577-5a7f4b753366-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"579f6107-57c9-4a44-b577-5a7f4b753366\") " pod="openstack/manila-api-0" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.226517 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/579f6107-57c9-4a44-b577-5a7f4b753366-etc-machine-id\") pod \"manila-api-0\" (UID: \"579f6107-57c9-4a44-b577-5a7f4b753366\") " pod="openstack/manila-api-0" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.226536 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/579f6107-57c9-4a44-b577-5a7f4b753366-scripts\") pod \"manila-api-0\" (UID: \"579f6107-57c9-4a44-b577-5a7f4b753366\") " pod="openstack/manila-api-0" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.226610 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/579f6107-57c9-4a44-b577-5a7f4b753366-config-data\") pod \"manila-api-0\" (UID: \"579f6107-57c9-4a44-b577-5a7f4b753366\") " pod="openstack/manila-api-0" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.226630 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/579f6107-57c9-4a44-b577-5a7f4b753366-logs\") pod \"manila-api-0\" (UID: \"579f6107-57c9-4a44-b577-5a7f4b753366\") " pod="openstack/manila-api-0" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.227006 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/579f6107-57c9-4a44-b577-5a7f4b753366-logs\") pod \"manila-api-0\" (UID: \"579f6107-57c9-4a44-b577-5a7f4b753366\") " pod="openstack/manila-api-0" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.230358 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/579f6107-57c9-4a44-b577-5a7f4b753366-etc-machine-id\") pod \"manila-api-0\" (UID: \"579f6107-57c9-4a44-b577-5a7f4b753366\") " pod="openstack/manila-api-0" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.230754 4728 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.172:3000/\": EOF" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.236482 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/579f6107-57c9-4a44-b577-5a7f4b753366-config-data\") pod \"manila-api-0\" (UID: \"579f6107-57c9-4a44-b577-5a7f4b753366\") " pod="openstack/manila-api-0" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.239032 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/579f6107-57c9-4a44-b577-5a7f4b753366-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"579f6107-57c9-4a44-b577-5a7f4b753366\") " pod="openstack/manila-api-0" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.244453 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/579f6107-57c9-4a44-b577-5a7f4b753366-config-data-custom\") pod \"manila-api-0\" (UID: \"579f6107-57c9-4a44-b577-5a7f4b753366\") " 
pod="openstack/manila-api-0" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.248240 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/579f6107-57c9-4a44-b577-5a7f4b753366-scripts\") pod \"manila-api-0\" (UID: \"579f6107-57c9-4a44-b577-5a7f4b753366\") " pod="openstack/manila-api-0" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.258784 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2slb8\" (UniqueName: \"kubernetes.io/projected/579f6107-57c9-4a44-b577-5a7f4b753366-kube-api-access-2slb8\") pod \"manila-api-0\" (UID: \"579f6107-57c9-4a44-b577-5a7f4b753366\") " pod="openstack/manila-api-0" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.267009 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-6bf4577867-xpjkr"] Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.311432 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56696ff475-f9ztl" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.395318 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-api-0" Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.620431 4728 generic.go:334] "Generic (PLEG): container finished" podID="e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f" containerID="7f41e6e21f1e69685f975c1ac3340149f69d19beace7a8567c1beb0451d047c7" exitCode=2 Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.620913 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f","Type":"ContainerDied","Data":"7f41e6e21f1e69685f975c1ac3340149f69d19beace7a8567c1beb0451d047c7"} Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.624575 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-6bf4577867-xpjkr" event={"ID":"9e762b75-33c7-464f-a8a9-316b5209b2b3","Type":"ContainerStarted","Data":"4fe89674470729b4ea534685642cbe683e1e3f011cb39fa6550e70add6c5a3d7"} Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.800297 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Dec 05 11:30:48 crc kubenswrapper[4728]: I1205 11:30:48.910327 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Dec 05 11:30:49 crc kubenswrapper[4728]: I1205 11:30:49.064332 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-56696ff475-f9ztl"] Dec 05 11:30:49 crc kubenswrapper[4728]: W1205 11:30:49.064376 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode01300fa_c016_435f_9d98_325203486428.slice/crio-cdcbfdbc00788ae5f33986021c4354c7a891a878f9d4385367a6367c78174faa WatchSource:0}: Error finding container cdcbfdbc00788ae5f33986021c4354c7a891a878f9d4385367a6367c78174faa: Status 404 returned error can't find the container with id cdcbfdbc00788ae5f33986021c4354c7a891a878f9d4385367a6367c78174faa Dec 05 11:30:49 crc kubenswrapper[4728]: I1205 11:30:49.371407 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Dec 05 11:30:49 crc kubenswrapper[4728]: I1205 11:30:49.687421 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" 
event={"ID":"579f6107-57c9-4a44-b577-5a7f4b753366","Type":"ContainerStarted","Data":"7f4fe89bc454ca8fc43e796a4b3867d48a77b70a164deb6dd25bc1c96ad07bba"} Dec 05 11:30:49 crc kubenswrapper[4728]: I1205 11:30:49.689310 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"a44b6cec-c64e-4a88-be5a-d51a1f6fb66e","Type":"ContainerStarted","Data":"bf182e459ea8db90cc795cccd8321636fa604bb00828db49a940f3b1cd9d23e5"} Dec 05 11:30:49 crc kubenswrapper[4728]: I1205 11:30:49.694094 4728 generic.go:334] "Generic (PLEG): container finished" podID="e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f" containerID="e47102520c329e24dd4df17569516824f4fdfaa6fe2561775d2ca6e43705cd57" exitCode=0 Dec 05 11:30:49 crc kubenswrapper[4728]: I1205 11:30:49.694384 4728 generic.go:334] "Generic (PLEG): container finished" podID="e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f" containerID="9abebf2f45df1f6cb123002e4ef7f9188cc958011fd013fb816cf7d5beae1565" exitCode=0 Dec 05 11:30:49 crc kubenswrapper[4728]: I1205 11:30:49.694175 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f","Type":"ContainerDied","Data":"e47102520c329e24dd4df17569516824f4fdfaa6fe2561775d2ca6e43705cd57"} Dec 05 11:30:49 crc kubenswrapper[4728]: I1205 11:30:49.694479 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f","Type":"ContainerDied","Data":"9abebf2f45df1f6cb123002e4ef7f9188cc958011fd013fb816cf7d5beae1565"} Dec 05 11:30:49 crc kubenswrapper[4728]: I1205 11:30:49.702405 4728 generic.go:334] "Generic (PLEG): container finished" podID="e01300fa-c016-435f-9d98-325203486428" containerID="0a3838e0bbad998aeec7c54987def9d1fedf50726efc85b333d98e736b690973" exitCode=0 Dec 05 11:30:49 crc kubenswrapper[4728]: I1205 11:30:49.702492 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56696ff475-f9ztl" event={"ID":"e01300fa-c016-435f-9d98-325203486428","Type":"ContainerDied","Data":"0a3838e0bbad998aeec7c54987def9d1fedf50726efc85b333d98e736b690973"} Dec 05 11:30:49 crc kubenswrapper[4728]: I1205 11:30:49.702543 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56696ff475-f9ztl" event={"ID":"e01300fa-c016-435f-9d98-325203486428","Type":"ContainerStarted","Data":"cdcbfdbc00788ae5f33986021c4354c7a891a878f9d4385367a6367c78174faa"} Dec 05 11:30:49 crc kubenswrapper[4728]: I1205 11:30:49.711251 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-6bf4577867-xpjkr" event={"ID":"9e762b75-33c7-464f-a8a9-316b5209b2b3","Type":"ContainerStarted","Data":"dff3817b06ba454052d7d2af502d8308e66e2ce345dbec2aa1f2556c1ecfe339"} Dec 05 11:30:49 crc kubenswrapper[4728]: I1205 11:30:49.711305 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-6bf4577867-xpjkr" event={"ID":"9e762b75-33c7-464f-a8a9-316b5209b2b3","Type":"ContainerStarted","Data":"35bfc774abafe2b54f8546ff6a88b26e976cb4ce7bbdfd5b2f5d5318811e5b0a"} Dec 05 11:30:49 crc kubenswrapper[4728]: I1205 11:30:49.712297 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-6bf4577867-xpjkr" Dec 05 11:30:49 crc kubenswrapper[4728]: I1205 11:30:49.712319 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-6bf4577867-xpjkr" Dec 05 11:30:49 crc kubenswrapper[4728]: I1205 11:30:49.736232 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/manila-share-share1-0" event={"ID":"d203e1b2-68bf-458f-b1de-b590da34a559","Type":"ContainerStarted","Data":"ec7428cb059ca4a77ca17a5964602a3b1e036795971cabc8d8267be4dbbd40d6"} Dec 05 11:30:49 crc kubenswrapper[4728]: I1205 11:30:49.778111 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-6bf4577867-xpjkr" podStartSLOduration=2.778089172 podStartE2EDuration="2.778089172s" podCreationTimestamp="2025-12-05 11:30:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:30:49.756587524 +0000 UTC m=+1383.898710217" watchObservedRunningTime="2025-12-05 11:30:49.778089172 +0000 UTC m=+1383.920211865" Dec 05 11:30:50 crc kubenswrapper[4728]: I1205 11:30:50.018548 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 05 11:30:50 crc kubenswrapper[4728]: I1205 11:30:50.058369 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-volume-volume1-0" Dec 05 11:30:50 crc kubenswrapper[4728]: I1205 11:30:50.752933 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"a44b6cec-c64e-4a88-be5a-d51a1f6fb66e","Type":"ContainerStarted","Data":"352cf0249dd32757a35c5baba19687dd187a3e006a069d650a8f9c120a7c10a1"} Dec 05 11:30:50 crc kubenswrapper[4728]: I1205 11:30:50.755424 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56696ff475-f9ztl" event={"ID":"e01300fa-c016-435f-9d98-325203486428","Type":"ContainerStarted","Data":"3f84844352374fd98447dc00dc547415d652ffffbd1b426deb3aeb4a7fe50fda"} Dec 05 11:30:50 crc kubenswrapper[4728]: I1205 11:30:50.755624 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-56696ff475-f9ztl" Dec 05 11:30:50 crc kubenswrapper[4728]: I1205 11:30:50.759362 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"579f6107-57c9-4a44-b577-5a7f4b753366","Type":"ContainerStarted","Data":"054ba426a1b45e16fde3869fcc1da4276f5c3259f75db882f5e1dc86c189e0d6"} Dec 05 11:30:50 crc kubenswrapper[4728]: I1205 11:30:50.785095 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-56696ff475-f9ztl" podStartSLOduration=3.785017064 podStartE2EDuration="3.785017064s" podCreationTimestamp="2025-12-05 11:30:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:30:50.77644631 +0000 UTC m=+1384.918569023" watchObservedRunningTime="2025-12-05 11:30:50.785017064 +0000 UTC m=+1384.927139757" Dec 05 11:30:50 crc kubenswrapper[4728]: I1205 11:30:50.893691 4728 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-5d7bdb6c68-cfbgd" podUID="7c54bdd7-0427-403e-a33d-2d52ec56a7fc" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.151:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.151:8443: connect: connection refused" Dec 05 11:30:50 crc kubenswrapper[4728]: I1205 11:30:50.980736 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-api-0"] Dec 05 11:30:51 crc kubenswrapper[4728]: I1205 11:30:51.778370 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" 
event={"ID":"579f6107-57c9-4a44-b577-5a7f4b753366","Type":"ContainerStarted","Data":"d91af35018b7e8a9cf77a3bbd84da18992bffddf6f2605a1495ff49d2641e957"} Dec 05 11:30:51 crc kubenswrapper[4728]: I1205 11:30:51.778922 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/manila-api-0" Dec 05 11:30:51 crc kubenswrapper[4728]: I1205 11:30:51.778433 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-api-0" podUID="579f6107-57c9-4a44-b577-5a7f4b753366" containerName="manila-api-log" containerID="cri-o://054ba426a1b45e16fde3869fcc1da4276f5c3259f75db882f5e1dc86c189e0d6" gracePeriod=30 Dec 05 11:30:51 crc kubenswrapper[4728]: I1205 11:30:51.778487 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-api-0" podUID="579f6107-57c9-4a44-b577-5a7f4b753366" containerName="manila-api" containerID="cri-o://d91af35018b7e8a9cf77a3bbd84da18992bffddf6f2605a1495ff49d2641e957" gracePeriod=30 Dec 05 11:30:51 crc kubenswrapper[4728]: I1205 11:30:51.784733 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"a44b6cec-c64e-4a88-be5a-d51a1f6fb66e","Type":"ContainerStarted","Data":"51732741acde6111fb99709ccc2527d03d1e4908ed446e34ec9ff3ae39521dd7"} Dec 05 11:30:51 crc kubenswrapper[4728]: I1205 11:30:51.794497 4728 generic.go:334] "Generic (PLEG): container finished" podID="e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f" containerID="129b9328641f752d80d23c6938388874ff7c60006f7a0efdc7c1d0b697ae5cfb" exitCode=0 Dec 05 11:30:51 crc kubenswrapper[4728]: I1205 11:30:51.794550 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f","Type":"ContainerDied","Data":"129b9328641f752d80d23c6938388874ff7c60006f7a0efdc7c1d0b697ae5cfb"} Dec 05 11:30:51 crc kubenswrapper[4728]: I1205 11:30:51.807770 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-api-0" podStartSLOduration=3.807746078 podStartE2EDuration="3.807746078s" podCreationTimestamp="2025-12-05 11:30:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:30:51.803710598 +0000 UTC m=+1385.945833311" watchObservedRunningTime="2025-12-05 11:30:51.807746078 +0000 UTC m=+1385.949868771" Dec 05 11:30:51 crc kubenswrapper[4728]: I1205 11:30:51.833651 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-scheduler-0" podStartSLOduration=4.079378649 podStartE2EDuration="4.833633426s" podCreationTimestamp="2025-12-05 11:30:47 +0000 UTC" firstStartedPulling="2025-12-05 11:30:48.803637658 +0000 UTC m=+1382.945760351" lastFinishedPulling="2025-12-05 11:30:49.557892435 +0000 UTC m=+1383.700015128" observedRunningTime="2025-12-05 11:30:51.826653805 +0000 UTC m=+1385.968776508" watchObservedRunningTime="2025-12-05 11:30:51.833633426 +0000 UTC m=+1385.975756119" Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.076343 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-backup-0" Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.180361 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.255850 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2gprw\" (UniqueName: \"kubernetes.io/projected/e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f-kube-api-access-2gprw\") pod \"e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f\" (UID: \"e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f\") " Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.255942 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f-log-httpd\") pod \"e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f\" (UID: \"e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f\") " Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.255999 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f-scripts\") pod \"e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f\" (UID: \"e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f\") " Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.256103 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f-sg-core-conf-yaml\") pod \"e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f\" (UID: \"e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f\") " Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.256202 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f-config-data\") pod \"e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f\" (UID: \"e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f\") " Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.256245 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f-run-httpd\") pod \"e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f\" (UID: \"e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f\") " Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.256281 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f-combined-ca-bundle\") pod \"e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f\" (UID: \"e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f\") " Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.264009 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f" (UID: "e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.264138 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f" (UID: "e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.269386 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f-kube-api-access-2gprw" (OuterVolumeSpecName: "kube-api-access-2gprw") pod "e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f" (UID: "e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f"). InnerVolumeSpecName "kube-api-access-2gprw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.269506 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f-scripts" (OuterVolumeSpecName: "scripts") pod "e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f" (UID: "e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.300993 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f" (UID: "e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.357846 4728 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.357878 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2gprw\" (UniqueName: \"kubernetes.io/projected/e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f-kube-api-access-2gprw\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.357889 4728 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.357899 4728 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.357910 4728 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.360833 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f" (UID: "e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.400354 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f-config-data" (OuterVolumeSpecName: "config-data") pod "e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f" (UID: "e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.462014 4728 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.462266 4728 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.474017 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-api-0" Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.563875 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/579f6107-57c9-4a44-b577-5a7f4b753366-config-data\") pod \"579f6107-57c9-4a44-b577-5a7f4b753366\" (UID: \"579f6107-57c9-4a44-b577-5a7f4b753366\") " Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.563949 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/579f6107-57c9-4a44-b577-5a7f4b753366-etc-machine-id\") pod \"579f6107-57c9-4a44-b577-5a7f4b753366\" (UID: \"579f6107-57c9-4a44-b577-5a7f4b753366\") " Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.563985 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2slb8\" (UniqueName: \"kubernetes.io/projected/579f6107-57c9-4a44-b577-5a7f4b753366-kube-api-access-2slb8\") pod \"579f6107-57c9-4a44-b577-5a7f4b753366\" (UID: \"579f6107-57c9-4a44-b577-5a7f4b753366\") " Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.564080 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/579f6107-57c9-4a44-b577-5a7f4b753366-combined-ca-bundle\") pod \"579f6107-57c9-4a44-b577-5a7f4b753366\" (UID: \"579f6107-57c9-4a44-b577-5a7f4b753366\") " Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.564162 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/579f6107-57c9-4a44-b577-5a7f4b753366-scripts\") pod \"579f6107-57c9-4a44-b577-5a7f4b753366\" (UID: \"579f6107-57c9-4a44-b577-5a7f4b753366\") " Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.564183 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/579f6107-57c9-4a44-b577-5a7f4b753366-config-data-custom\") pod \"579f6107-57c9-4a44-b577-5a7f4b753366\" (UID: \"579f6107-57c9-4a44-b577-5a7f4b753366\") " Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.564256 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/579f6107-57c9-4a44-b577-5a7f4b753366-logs\") pod \"579f6107-57c9-4a44-b577-5a7f4b753366\" (UID: \"579f6107-57c9-4a44-b577-5a7f4b753366\") " Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.564939 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/579f6107-57c9-4a44-b577-5a7f4b753366-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "579f6107-57c9-4a44-b577-5a7f4b753366" (UID: 
"579f6107-57c9-4a44-b577-5a7f4b753366"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.565383 4728 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/579f6107-57c9-4a44-b577-5a7f4b753366-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.567077 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/579f6107-57c9-4a44-b577-5a7f4b753366-logs" (OuterVolumeSpecName: "logs") pod "579f6107-57c9-4a44-b577-5a7f4b753366" (UID: "579f6107-57c9-4a44-b577-5a7f4b753366"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.570214 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/579f6107-57c9-4a44-b577-5a7f4b753366-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "579f6107-57c9-4a44-b577-5a7f4b753366" (UID: "579f6107-57c9-4a44-b577-5a7f4b753366"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.570634 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/579f6107-57c9-4a44-b577-5a7f4b753366-kube-api-access-2slb8" (OuterVolumeSpecName: "kube-api-access-2slb8") pod "579f6107-57c9-4a44-b577-5a7f4b753366" (UID: "579f6107-57c9-4a44-b577-5a7f4b753366"). InnerVolumeSpecName "kube-api-access-2slb8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.572333 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/579f6107-57c9-4a44-b577-5a7f4b753366-scripts" (OuterVolumeSpecName: "scripts") pod "579f6107-57c9-4a44-b577-5a7f4b753366" (UID: "579f6107-57c9-4a44-b577-5a7f4b753366"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.605632 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/579f6107-57c9-4a44-b577-5a7f4b753366-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "579f6107-57c9-4a44-b577-5a7f4b753366" (UID: "579f6107-57c9-4a44-b577-5a7f4b753366"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.637437 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/579f6107-57c9-4a44-b577-5a7f4b753366-config-data" (OuterVolumeSpecName: "config-data") pod "579f6107-57c9-4a44-b577-5a7f4b753366" (UID: "579f6107-57c9-4a44-b577-5a7f4b753366"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.671379 4728 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/579f6107-57c9-4a44-b577-5a7f4b753366-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.671417 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2slb8\" (UniqueName: \"kubernetes.io/projected/579f6107-57c9-4a44-b577-5a7f4b753366-kube-api-access-2slb8\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.671429 4728 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/579f6107-57c9-4a44-b577-5a7f4b753366-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.671440 4728 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/579f6107-57c9-4a44-b577-5a7f4b753366-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.671451 4728 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/579f6107-57c9-4a44-b577-5a7f4b753366-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.671461 4728 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/579f6107-57c9-4a44-b577-5a7f4b753366-logs\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.818756 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f","Type":"ContainerDied","Data":"34573b7eccd412b610bbb3790db86f1f08430e7fb7d3f13a8dd085daca5c02c4"} Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.818840 4728 scope.go:117] "RemoveContainer" containerID="e47102520c329e24dd4df17569516824f4fdfaa6fe2561775d2ca6e43705cd57" Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.818850 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.824775 4728 generic.go:334] "Generic (PLEG): container finished" podID="579f6107-57c9-4a44-b577-5a7f4b753366" containerID="d91af35018b7e8a9cf77a3bbd84da18992bffddf6f2605a1495ff49d2641e957" exitCode=143 Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.824824 4728 generic.go:334] "Generic (PLEG): container finished" podID="579f6107-57c9-4a44-b577-5a7f4b753366" containerID="054ba426a1b45e16fde3869fcc1da4276f5c3259f75db882f5e1dc86c189e0d6" exitCode=143 Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.826017 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-api-0" Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.827319 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"579f6107-57c9-4a44-b577-5a7f4b753366","Type":"ContainerDied","Data":"d91af35018b7e8a9cf77a3bbd84da18992bffddf6f2605a1495ff49d2641e957"} Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.827379 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"579f6107-57c9-4a44-b577-5a7f4b753366","Type":"ContainerDied","Data":"054ba426a1b45e16fde3869fcc1da4276f5c3259f75db882f5e1dc86c189e0d6"} Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.827396 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"579f6107-57c9-4a44-b577-5a7f4b753366","Type":"ContainerDied","Data":"7f4fe89bc454ca8fc43e796a4b3867d48a77b70a164deb6dd25bc1c96ad07bba"} Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.891770 4728 scope.go:117] "RemoveContainer" containerID="7f41e6e21f1e69685f975c1ac3340149f69d19beace7a8567c1beb0451d047c7" Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.908176 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.947808 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.952676 4728 scope.go:117] "RemoveContainer" containerID="129b9328641f752d80d23c6938388874ff7c60006f7a0efdc7c1d0b697ae5cfb" Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.959192 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-api-0"] Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.967672 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-api-0"] Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.974449 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:30:52 crc kubenswrapper[4728]: E1205 11:30:52.975400 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="579f6107-57c9-4a44-b577-5a7f4b753366" containerName="manila-api" Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.975432 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="579f6107-57c9-4a44-b577-5a7f4b753366" containerName="manila-api" Dec 05 11:30:52 crc kubenswrapper[4728]: E1205 11:30:52.975451 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f" containerName="proxy-httpd" Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.975458 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f" containerName="proxy-httpd" Dec 05 11:30:52 crc kubenswrapper[4728]: E1205 11:30:52.975478 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="579f6107-57c9-4a44-b577-5a7f4b753366" containerName="manila-api-log" Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.975484 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="579f6107-57c9-4a44-b577-5a7f4b753366" containerName="manila-api-log" Dec 05 11:30:52 crc kubenswrapper[4728]: E1205 11:30:52.975511 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f" containerName="ceilometer-central-agent" Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.975518 4728 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f" containerName="ceilometer-central-agent" Dec 05 11:30:52 crc kubenswrapper[4728]: E1205 11:30:52.975531 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f" containerName="ceilometer-notification-agent" Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.975539 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f" containerName="ceilometer-notification-agent" Dec 05 11:30:52 crc kubenswrapper[4728]: E1205 11:30:52.975547 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f" containerName="sg-core" Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.975554 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f" containerName="sg-core" Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.975930 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f" containerName="sg-core" Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.975953 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="579f6107-57c9-4a44-b577-5a7f4b753366" containerName="manila-api-log" Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.975976 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f" containerName="ceilometer-central-agent" Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.975991 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f" containerName="proxy-httpd" Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.976006 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="579f6107-57c9-4a44-b577-5a7f4b753366" containerName="manila-api" Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.976018 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f" containerName="ceilometer-notification-agent" Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.982206 4728 scope.go:117] "RemoveContainer" containerID="9abebf2f45df1f6cb123002e4ef7f9188cc958011fd013fb816cf7d5beae1565" Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.984714 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.992565 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-api-0"] Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.993436 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.993657 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 11:30:52 crc kubenswrapper[4728]: I1205 11:30:52.994837 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-api-0" Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:52.997858 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-manila-public-svc" Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:52.998005 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-api-config-data" Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:52.998067 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-manila-internal-svc" Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.054972 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.089263 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.091823 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fmkl9\" (UniqueName: \"kubernetes.io/projected/686f83cd-910d-4bf2-977a-8544326152e4-kube-api-access-fmkl9\") pod \"manila-api-0\" (UID: \"686f83cd-910d-4bf2-977a-8544326152e4\") " pod="openstack/manila-api-0" Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.091857 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cb320e31-3c04-4469-9a75-52d80531280c-scripts\") pod \"ceilometer-0\" (UID: \"cb320e31-3c04-4469-9a75-52d80531280c\") " pod="openstack/ceilometer-0" Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.091880 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/686f83cd-910d-4bf2-977a-8544326152e4-logs\") pod \"manila-api-0\" (UID: \"686f83cd-910d-4bf2-977a-8544326152e4\") " pod="openstack/manila-api-0" Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.091943 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cb320e31-3c04-4469-9a75-52d80531280c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"cb320e31-3c04-4469-9a75-52d80531280c\") " pod="openstack/ceilometer-0" Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.091960 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/686f83cd-910d-4bf2-977a-8544326152e4-public-tls-certs\") pod \"manila-api-0\" (UID: \"686f83cd-910d-4bf2-977a-8544326152e4\") " pod="openstack/manila-api-0" Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.091972 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cb320e31-3c04-4469-9a75-52d80531280c-log-httpd\") pod \"ceilometer-0\" (UID: \"cb320e31-3c04-4469-9a75-52d80531280c\") " pod="openstack/ceilometer-0" Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.091997 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/686f83cd-910d-4bf2-977a-8544326152e4-config-data\") pod \"manila-api-0\" (UID: \"686f83cd-910d-4bf2-977a-8544326152e4\") " pod="openstack/manila-api-0" Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.092016 4728 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/686f83cd-910d-4bf2-977a-8544326152e4-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"686f83cd-910d-4bf2-977a-8544326152e4\") " pod="openstack/manila-api-0" Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.092032 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/686f83cd-910d-4bf2-977a-8544326152e4-internal-tls-certs\") pod \"manila-api-0\" (UID: \"686f83cd-910d-4bf2-977a-8544326152e4\") " pod="openstack/manila-api-0" Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.092110 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/686f83cd-910d-4bf2-977a-8544326152e4-etc-machine-id\") pod \"manila-api-0\" (UID: \"686f83cd-910d-4bf2-977a-8544326152e4\") " pod="openstack/manila-api-0" Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.092133 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cb320e31-3c04-4469-9a75-52d80531280c-run-httpd\") pod \"ceilometer-0\" (UID: \"cb320e31-3c04-4469-9a75-52d80531280c\") " pod="openstack/ceilometer-0" Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.092148 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-58kc5\" (UniqueName: \"kubernetes.io/projected/cb320e31-3c04-4469-9a75-52d80531280c-kube-api-access-58kc5\") pod \"ceilometer-0\" (UID: \"cb320e31-3c04-4469-9a75-52d80531280c\") " pod="openstack/ceilometer-0" Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.092168 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/686f83cd-910d-4bf2-977a-8544326152e4-scripts\") pod \"manila-api-0\" (UID: \"686f83cd-910d-4bf2-977a-8544326152e4\") " pod="openstack/manila-api-0" Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.092183 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/686f83cd-910d-4bf2-977a-8544326152e4-config-data-custom\") pod \"manila-api-0\" (UID: \"686f83cd-910d-4bf2-977a-8544326152e4\") " pod="openstack/manila-api-0" Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.092230 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb320e31-3c04-4469-9a75-52d80531280c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"cb320e31-3c04-4469-9a75-52d80531280c\") " pod="openstack/ceilometer-0" Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.092328 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb320e31-3c04-4469-9a75-52d80531280c-config-data\") pod \"ceilometer-0\" (UID: \"cb320e31-3c04-4469-9a75-52d80531280c\") " pod="openstack/ceilometer-0" Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.103716 4728 scope.go:117] "RemoveContainer" containerID="d91af35018b7e8a9cf77a3bbd84da18992bffddf6f2605a1495ff49d2641e957" Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.179011 4728 scope.go:117] 
"RemoveContainer" containerID="054ba426a1b45e16fde3869fcc1da4276f5c3259f75db882f5e1dc86c189e0d6" Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.195920 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/686f83cd-910d-4bf2-977a-8544326152e4-etc-machine-id\") pod \"manila-api-0\" (UID: \"686f83cd-910d-4bf2-977a-8544326152e4\") " pod="openstack/manila-api-0" Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.196472 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cb320e31-3c04-4469-9a75-52d80531280c-run-httpd\") pod \"ceilometer-0\" (UID: \"cb320e31-3c04-4469-9a75-52d80531280c\") " pod="openstack/ceilometer-0" Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.196503 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-58kc5\" (UniqueName: \"kubernetes.io/projected/cb320e31-3c04-4469-9a75-52d80531280c-kube-api-access-58kc5\") pod \"ceilometer-0\" (UID: \"cb320e31-3c04-4469-9a75-52d80531280c\") " pod="openstack/ceilometer-0" Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.196527 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/686f83cd-910d-4bf2-977a-8544326152e4-scripts\") pod \"manila-api-0\" (UID: \"686f83cd-910d-4bf2-977a-8544326152e4\") " pod="openstack/manila-api-0" Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.196542 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/686f83cd-910d-4bf2-977a-8544326152e4-config-data-custom\") pod \"manila-api-0\" (UID: \"686f83cd-910d-4bf2-977a-8544326152e4\") " pod="openstack/manila-api-0" Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.196584 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb320e31-3c04-4469-9a75-52d80531280c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"cb320e31-3c04-4469-9a75-52d80531280c\") " pod="openstack/ceilometer-0" Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.196601 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb320e31-3c04-4469-9a75-52d80531280c-config-data\") pod \"ceilometer-0\" (UID: \"cb320e31-3c04-4469-9a75-52d80531280c\") " pod="openstack/ceilometer-0" Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.196624 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fmkl9\" (UniqueName: \"kubernetes.io/projected/686f83cd-910d-4bf2-977a-8544326152e4-kube-api-access-fmkl9\") pod \"manila-api-0\" (UID: \"686f83cd-910d-4bf2-977a-8544326152e4\") " pod="openstack/manila-api-0" Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.196640 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cb320e31-3c04-4469-9a75-52d80531280c-scripts\") pod \"ceilometer-0\" (UID: \"cb320e31-3c04-4469-9a75-52d80531280c\") " pod="openstack/ceilometer-0" Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.196656 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/686f83cd-910d-4bf2-977a-8544326152e4-logs\") pod \"manila-api-0\" 
(UID: \"686f83cd-910d-4bf2-977a-8544326152e4\") " pod="openstack/manila-api-0" Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.196703 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cb320e31-3c04-4469-9a75-52d80531280c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"cb320e31-3c04-4469-9a75-52d80531280c\") " pod="openstack/ceilometer-0" Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.196721 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/686f83cd-910d-4bf2-977a-8544326152e4-public-tls-certs\") pod \"manila-api-0\" (UID: \"686f83cd-910d-4bf2-977a-8544326152e4\") " pod="openstack/manila-api-0" Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.196736 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cb320e31-3c04-4469-9a75-52d80531280c-log-httpd\") pod \"ceilometer-0\" (UID: \"cb320e31-3c04-4469-9a75-52d80531280c\") " pod="openstack/ceilometer-0" Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.196760 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/686f83cd-910d-4bf2-977a-8544326152e4-config-data\") pod \"manila-api-0\" (UID: \"686f83cd-910d-4bf2-977a-8544326152e4\") " pod="openstack/manila-api-0" Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.196777 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/686f83cd-910d-4bf2-977a-8544326152e4-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"686f83cd-910d-4bf2-977a-8544326152e4\") " pod="openstack/manila-api-0" Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.196813 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/686f83cd-910d-4bf2-977a-8544326152e4-internal-tls-certs\") pod \"manila-api-0\" (UID: \"686f83cd-910d-4bf2-977a-8544326152e4\") " pod="openstack/manila-api-0" Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.197107 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/686f83cd-910d-4bf2-977a-8544326152e4-etc-machine-id\") pod \"manila-api-0\" (UID: \"686f83cd-910d-4bf2-977a-8544326152e4\") " pod="openstack/manila-api-0" Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.197470 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cb320e31-3c04-4469-9a75-52d80531280c-run-httpd\") pod \"ceilometer-0\" (UID: \"cb320e31-3c04-4469-9a75-52d80531280c\") " pod="openstack/ceilometer-0" Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.197966 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cb320e31-3c04-4469-9a75-52d80531280c-log-httpd\") pod \"ceilometer-0\" (UID: \"cb320e31-3c04-4469-9a75-52d80531280c\") " pod="openstack/ceilometer-0" Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.198275 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/686f83cd-910d-4bf2-977a-8544326152e4-logs\") pod \"manila-api-0\" (UID: \"686f83cd-910d-4bf2-977a-8544326152e4\") " 
pod="openstack/manila-api-0" Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.207340 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/686f83cd-910d-4bf2-977a-8544326152e4-config-data-custom\") pod \"manila-api-0\" (UID: \"686f83cd-910d-4bf2-977a-8544326152e4\") " pod="openstack/manila-api-0" Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.208463 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/686f83cd-910d-4bf2-977a-8544326152e4-internal-tls-certs\") pod \"manila-api-0\" (UID: \"686f83cd-910d-4bf2-977a-8544326152e4\") " pod="openstack/manila-api-0" Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.208717 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb320e31-3c04-4469-9a75-52d80531280c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"cb320e31-3c04-4469-9a75-52d80531280c\") " pod="openstack/ceilometer-0" Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.209931 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cb320e31-3c04-4469-9a75-52d80531280c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"cb320e31-3c04-4469-9a75-52d80531280c\") " pod="openstack/ceilometer-0" Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.211192 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/686f83cd-910d-4bf2-977a-8544326152e4-config-data\") pod \"manila-api-0\" (UID: \"686f83cd-910d-4bf2-977a-8544326152e4\") " pod="openstack/manila-api-0" Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.211356 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/686f83cd-910d-4bf2-977a-8544326152e4-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"686f83cd-910d-4bf2-977a-8544326152e4\") " pod="openstack/manila-api-0" Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.212190 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cb320e31-3c04-4469-9a75-52d80531280c-scripts\") pod \"ceilometer-0\" (UID: \"cb320e31-3c04-4469-9a75-52d80531280c\") " pod="openstack/ceilometer-0" Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.213023 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb320e31-3c04-4469-9a75-52d80531280c-config-data\") pod \"ceilometer-0\" (UID: \"cb320e31-3c04-4469-9a75-52d80531280c\") " pod="openstack/ceilometer-0" Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.214728 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/686f83cd-910d-4bf2-977a-8544326152e4-scripts\") pod \"manila-api-0\" (UID: \"686f83cd-910d-4bf2-977a-8544326152e4\") " pod="openstack/manila-api-0" Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.217581 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fmkl9\" (UniqueName: \"kubernetes.io/projected/686f83cd-910d-4bf2-977a-8544326152e4-kube-api-access-fmkl9\") pod \"manila-api-0\" (UID: \"686f83cd-910d-4bf2-977a-8544326152e4\") " pod="openstack/manila-api-0" Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 
Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.438504 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.438778 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/686f83cd-910d-4bf2-977a-8544326152e4-public-tls-certs\") pod \"manila-api-0\" (UID: \"686f83cd-910d-4bf2-977a-8544326152e4\") " pod="openstack/manila-api-0"
Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.451871 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-api-0"
Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.455837 4728 scope.go:117] "RemoveContainer" containerID="d91af35018b7e8a9cf77a3bbd84da18992bffddf6f2605a1495ff49d2641e957"
Dec 05 11:30:53 crc kubenswrapper[4728]: E1205 11:30:53.457563 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d91af35018b7e8a9cf77a3bbd84da18992bffddf6f2605a1495ff49d2641e957\": container with ID starting with d91af35018b7e8a9cf77a3bbd84da18992bffddf6f2605a1495ff49d2641e957 not found: ID does not exist" containerID="d91af35018b7e8a9cf77a3bbd84da18992bffddf6f2605a1495ff49d2641e957"
Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.457644 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d91af35018b7e8a9cf77a3bbd84da18992bffddf6f2605a1495ff49d2641e957"} err="failed to get container status \"d91af35018b7e8a9cf77a3bbd84da18992bffddf6f2605a1495ff49d2641e957\": rpc error: code = NotFound desc = could not find container \"d91af35018b7e8a9cf77a3bbd84da18992bffddf6f2605a1495ff49d2641e957\": container with ID starting with d91af35018b7e8a9cf77a3bbd84da18992bffddf6f2605a1495ff49d2641e957 not found: ID does not exist"
Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.457702 4728 scope.go:117] "RemoveContainer" containerID="054ba426a1b45e16fde3869fcc1da4276f5c3259f75db882f5e1dc86c189e0d6"
Dec 05 11:30:53 crc kubenswrapper[4728]: E1205 11:30:53.458136 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"054ba426a1b45e16fde3869fcc1da4276f5c3259f75db882f5e1dc86c189e0d6\": container with ID starting with 054ba426a1b45e16fde3869fcc1da4276f5c3259f75db882f5e1dc86c189e0d6 not found: ID does not exist" containerID="054ba426a1b45e16fde3869fcc1da4276f5c3259f75db882f5e1dc86c189e0d6"
Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.458163 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"054ba426a1b45e16fde3869fcc1da4276f5c3259f75db882f5e1dc86c189e0d6"} err="failed to get container status \"054ba426a1b45e16fde3869fcc1da4276f5c3259f75db882f5e1dc86c189e0d6\": rpc error: code = NotFound desc = could not find container \"054ba426a1b45e16fde3869fcc1da4276f5c3259f75db882f5e1dc86c189e0d6\": container with ID starting with 054ba426a1b45e16fde3869fcc1da4276f5c3259f75db882f5e1dc86c189e0d6 not found: ID does not exist"
Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.458185 4728 scope.go:117] "RemoveContainer" containerID="d91af35018b7e8a9cf77a3bbd84da18992bffddf6f2605a1495ff49d2641e957"
Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.458528 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d91af35018b7e8a9cf77a3bbd84da18992bffddf6f2605a1495ff49d2641e957"} err="failed to get container status \"d91af35018b7e8a9cf77a3bbd84da18992bffddf6f2605a1495ff49d2641e957\": rpc error: code = NotFound desc = could not find container \"d91af35018b7e8a9cf77a3bbd84da18992bffddf6f2605a1495ff49d2641e957\": container with ID starting with d91af35018b7e8a9cf77a3bbd84da18992bffddf6f2605a1495ff49d2641e957 not found: ID does not exist"
Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.458563 4728 scope.go:117] "RemoveContainer" containerID="054ba426a1b45e16fde3869fcc1da4276f5c3259f75db882f5e1dc86c189e0d6"
Dec 05 11:30:53 crc kubenswrapper[4728]: I1205 11:30:53.468075 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"054ba426a1b45e16fde3869fcc1da4276f5c3259f75db882f5e1dc86c189e0d6"} err="failed to get container status \"054ba426a1b45e16fde3869fcc1da4276f5c3259f75db882f5e1dc86c189e0d6\": rpc error: code = NotFound desc = could not find container \"054ba426a1b45e16fde3869fcc1da4276f5c3259f75db882f5e1dc86c189e0d6\": container with ID starting with 054ba426a1b45e16fde3869fcc1da4276f5c3259f75db882f5e1dc86c189e0d6 not found: ID does not exist"
Dec 05 11:30:54 crc kubenswrapper[4728]: I1205 11:30:54.075333 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"]
Dec 05 11:30:54 crc kubenswrapper[4728]: I1205 11:30:54.140924 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 11:30:54 crc kubenswrapper[4728]: I1205 11:30:54.163577 4728 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 05 11:30:54 crc kubenswrapper[4728]: I1205 11:30:54.365145 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="579f6107-57c9-4a44-b577-5a7f4b753366" path="/var/lib/kubelet/pods/579f6107-57c9-4a44-b577-5a7f4b753366/volumes"
Dec 05 11:30:54 crc kubenswrapper[4728]: I1205 11:30:54.366235 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f" path="/var/lib/kubelet/pods/e2ab1e8b-3beb-442c-9bbe-e766a7a2a87f/volumes"
Dec 05 11:30:54 crc kubenswrapper[4728]: I1205 11:30:54.896000 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cb320e31-3c04-4469-9a75-52d80531280c","Type":"ContainerStarted","Data":"400354a3f91ef641ef023f842c32e3ac5dd97ca0b8074c54becb05794a3f3b59"}
Dec 05 11:30:54 crc kubenswrapper[4728]: I1205 11:30:54.899524 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"686f83cd-910d-4bf2-977a-8544326152e4","Type":"ContainerStarted","Data":"7f40d11fc363393cc7e911eeb58b2b7d7bb4ba649d33f938212e58f76e59bdca"}
Dec 05 11:30:54 crc kubenswrapper[4728]: I1205 11:30:54.899564 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"686f83cd-910d-4bf2-977a-8544326152e4","Type":"ContainerStarted","Data":"3156e1a1a5cbfc5d71791ce8ba2ce170124080c264e116783ec5714d96e2f6ab"}
Dec 05 11:30:55 crc kubenswrapper[4728]: I1205 11:30:55.289717 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0"
Dec 05 11:30:55 crc kubenswrapper[4728]: I1205 11:30:55.355131 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-volume-volume1-0"
Dec 05 11:30:55 crc kubenswrapper[4728]: I1205 11:30:55.701839 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 11:30:55 crc kubenswrapper[4728]: I1205 11:30:55.701905 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 11:30:55 crc kubenswrapper[4728]: I1205 11:30:55.910119 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"686f83cd-910d-4bf2-977a-8544326152e4","Type":"ContainerStarted","Data":"3f72613951fd4bcb2f17256a6f63346f02152e5c0ddde1d2f9f55866deb85a2d"}
Dec 05 11:30:55 crc kubenswrapper[4728]: I1205 11:30:55.912509 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cb320e31-3c04-4469-9a75-52d80531280c","Type":"ContainerStarted","Data":"9440238024f25c719b0302e9a220ff640cc3848879c619428dd47599daed16e1"}
Dec 05 11:30:56 crc kubenswrapper[4728]: I1205 11:30:56.155417 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 11:30:56 crc kubenswrapper[4728]: I1205 11:30:56.922601 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/manila-api-0"
Dec 05 11:30:56 crc kubenswrapper[4728]: I1205 11:30:56.945607 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-api-0" podStartSLOduration=4.945589048 podStartE2EDuration="4.945589048s" podCreationTimestamp="2025-12-05 11:30:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:30:56.944019115 +0000 UTC m=+1391.086141808" watchObservedRunningTime="2025-12-05 11:30:56.945589048 +0000 UTC m=+1391.087711741"
Dec 05 11:30:57 crc kubenswrapper[4728]: I1205 11:30:57.510156 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-6bf4577867-xpjkr"
Dec 05 11:30:57 crc kubenswrapper[4728]: I1205 11:30:57.533296 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-6bf4577867-xpjkr"
Dec 05 11:30:58 crc kubenswrapper[4728]: I1205 11:30:58.105944 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-scheduler-0"
Dec 05 11:30:58 crc kubenswrapper[4728]: I1205 11:30:58.312960 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-56696ff475-f9ztl"
Dec 05 11:30:58 crc kubenswrapper[4728]: I1205 11:30:58.388269 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-xxm6x"]
Dec 05 11:30:58 crc kubenswrapper[4728]: I1205 11:30:58.388536 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5c9776ccc5-xxm6x" podUID="296a9db8-738c-4a95-87ed-3037d5b6ddf5" containerName="dnsmasq-dns" containerID="cri-o://6dd9d3a34dedd9407e02a156620c7d206a6463ca97d95c323314e1a916f07809" gracePeriod=10
Dec 05 11:30:58 crc kubenswrapper[4728]: I1205 11:30:58.953442 4728 generic.go:334] "Generic (PLEG): container finished" podID="296a9db8-738c-4a95-87ed-3037d5b6ddf5" containerID="6dd9d3a34dedd9407e02a156620c7d206a6463ca97d95c323314e1a916f07809" exitCode=0
Dec 05 11:30:58 crc kubenswrapper[4728]: I1205 11:30:58.953494 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-xxm6x" event={"ID":"296a9db8-738c-4a95-87ed-3037d5b6ddf5","Type":"ContainerDied","Data":"6dd9d3a34dedd9407e02a156620c7d206a6463ca97d95c323314e1a916f07809"}
Dec 05 11:31:00 crc kubenswrapper[4728]: I1205 11:31:00.894044 4728 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-5d7bdb6c68-cfbgd" podUID="7c54bdd7-0427-403e-a33d-2d52ec56a7fc" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.151:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.151:8443: connect: connection refused"
Dec 05 11:31:01 crc kubenswrapper[4728]: I1205 11:31:01.053900 4728 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5c9776ccc5-xxm6x" podUID="296a9db8-738c-4a95-87ed-3037d5b6ddf5" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.168:5353: connect: connection refused"
Dec 05 11:31:05 crc kubenswrapper[4728]: E1205 11:31:05.804525 4728 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-manila-share:current-podified"
Dec 05 11:31:05 crc kubenswrapper[4728]: E1205 11:31:05.805165 4728 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manila-share,Image:quay.io/podified-antelope-centos9/openstack-manila-share:current-podified,Command:[/usr/bin/dumb-init],Args:[--single-child -- /bin/bash -c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n95hd8h576h544h78h648h5d9hcbh6fhch66hc8h595h5f9h556h79h54bh5b9h576h9h5d4hc9h698h565h555h5bbh5fh54ch59dh6dh68ch5d9q,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:MALLOC_ARENA_MAX,Value:1,ValueFrom:nil,},EnvVar{Name:MALLOC_MMAP_THRESHOLD_,Value:131072,ValueFrom:nil,},EnvVar{Name:MALLOC_TRIM_THRESHOLD_,Value:262144,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ceph,ReadOnly:true,MountPath:/etc/ceph,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-custom,ReadOnly:true,MountPath:/etc/manila/manila.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:var-lib-manila,ReadOnly:false,MountPath:/var/lib/manila,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:manila-share-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-flzfl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/,Port:{0 8080 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:20,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:*true,SELinuxOptions:nil,RunAsUser:*42429,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/,Port:{0 8080 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:10,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:12,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod manila-share-share1-0_openstack(d203e1b2-68bf-458f-b1de-b590da34a559): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
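The liveness and readiness failures above are plain HTTP GETs against the container's endpoint; "connect: connection refused" means nothing is listening there yet. A minimal sketch of such a probe with a timeout, using the machine-config-daemon URL from the entry above (the 200-399 success window matches Kubernetes HTTP probe semantics):

```go
// Sketch: the shape of the HTTP probes failing above. A GET with a
// short timeout; a dial error such as "connect: connection refused"
// marks the probe as failed, as does a status outside 200-399.
package main

import (
	"fmt"
	"net/http"
	"time"
)

func probe(url string) string {
	client := &http.Client{Timeout: 2 * time.Second}
	resp, err := client.Get(url)
	if err != nil {
		return "failure: " + err.Error() // e.g. connection refused
	}
	defer resp.Body.Close()
	if resp.StatusCode >= 200 && resp.StatusCode < 400 {
		return "success"
	}
	return fmt.Sprintf("failure: HTTP %d", resp.StatusCode)
}

func main() {
	fmt.Println(probe("http://127.0.0.1:8798/health"))
}
```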
kubenswrapper[4728]: E1205 11:31:05.821578 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manila-share\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"probe\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-manila-share:current-podified\\\"\"]" pod="openstack/manila-share-share1-0" podUID="d203e1b2-68bf-458f-b1de-b590da34a559" Dec 05 11:31:06 crc kubenswrapper[4728]: I1205 11:31:06.031243 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-xxm6x" event={"ID":"296a9db8-738c-4a95-87ed-3037d5b6ddf5","Type":"ContainerDied","Data":"7cda411cf3b26d6f6740e94690883b9e9c3b2f260e17e6f1e4d0aa3f7acdc75b"} Dec 05 11:31:06 crc kubenswrapper[4728]: I1205 11:31:06.031305 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7cda411cf3b26d6f6740e94690883b9e9c3b2f260e17e6f1e4d0aa3f7acdc75b" Dec 05 11:31:06 crc kubenswrapper[4728]: E1205 11:31:06.034935 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manila-share\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-manila-share:current-podified\\\"\", failed to \"StartContainer\" for \"probe\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-manila-share:current-podified\\\"\"]" pod="openstack/manila-share-share1-0" podUID="d203e1b2-68bf-458f-b1de-b590da34a559" Dec 05 11:31:06 crc kubenswrapper[4728]: I1205 11:31:06.062737 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c9776ccc5-xxm6x" Dec 05 11:31:06 crc kubenswrapper[4728]: I1205 11:31:06.142297 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dnpl6\" (UniqueName: \"kubernetes.io/projected/296a9db8-738c-4a95-87ed-3037d5b6ddf5-kube-api-access-dnpl6\") pod \"296a9db8-738c-4a95-87ed-3037d5b6ddf5\" (UID: \"296a9db8-738c-4a95-87ed-3037d5b6ddf5\") " Dec 05 11:31:06 crc kubenswrapper[4728]: I1205 11:31:06.142580 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/296a9db8-738c-4a95-87ed-3037d5b6ddf5-ovsdbserver-nb\") pod \"296a9db8-738c-4a95-87ed-3037d5b6ddf5\" (UID: \"296a9db8-738c-4a95-87ed-3037d5b6ddf5\") " Dec 05 11:31:06 crc kubenswrapper[4728]: I1205 11:31:06.142599 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/296a9db8-738c-4a95-87ed-3037d5b6ddf5-ovsdbserver-sb\") pod \"296a9db8-738c-4a95-87ed-3037d5b6ddf5\" (UID: \"296a9db8-738c-4a95-87ed-3037d5b6ddf5\") " Dec 05 11:31:06 crc kubenswrapper[4728]: I1205 11:31:06.142656 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/296a9db8-738c-4a95-87ed-3037d5b6ddf5-dns-swift-storage-0\") pod \"296a9db8-738c-4a95-87ed-3037d5b6ddf5\" (UID: \"296a9db8-738c-4a95-87ed-3037d5b6ddf5\") " Dec 05 11:31:06 crc kubenswrapper[4728]: I1205 11:31:06.142684 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/296a9db8-738c-4a95-87ed-3037d5b6ddf5-config\") pod \"296a9db8-738c-4a95-87ed-3037d5b6ddf5\" (UID: 
\"296a9db8-738c-4a95-87ed-3037d5b6ddf5\") " Dec 05 11:31:06 crc kubenswrapper[4728]: I1205 11:31:06.142849 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/296a9db8-738c-4a95-87ed-3037d5b6ddf5-dns-svc\") pod \"296a9db8-738c-4a95-87ed-3037d5b6ddf5\" (UID: \"296a9db8-738c-4a95-87ed-3037d5b6ddf5\") " Dec 05 11:31:06 crc kubenswrapper[4728]: I1205 11:31:06.149068 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/296a9db8-738c-4a95-87ed-3037d5b6ddf5-kube-api-access-dnpl6" (OuterVolumeSpecName: "kube-api-access-dnpl6") pod "296a9db8-738c-4a95-87ed-3037d5b6ddf5" (UID: "296a9db8-738c-4a95-87ed-3037d5b6ddf5"). InnerVolumeSpecName "kube-api-access-dnpl6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:31:06 crc kubenswrapper[4728]: I1205 11:31:06.231913 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/296a9db8-738c-4a95-87ed-3037d5b6ddf5-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "296a9db8-738c-4a95-87ed-3037d5b6ddf5" (UID: "296a9db8-738c-4a95-87ed-3037d5b6ddf5"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:31:06 crc kubenswrapper[4728]: I1205 11:31:06.241656 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/296a9db8-738c-4a95-87ed-3037d5b6ddf5-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "296a9db8-738c-4a95-87ed-3037d5b6ddf5" (UID: "296a9db8-738c-4a95-87ed-3037d5b6ddf5"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:31:06 crc kubenswrapper[4728]: I1205 11:31:06.244669 4728 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/296a9db8-738c-4a95-87ed-3037d5b6ddf5-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:06 crc kubenswrapper[4728]: I1205 11:31:06.244697 4728 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/296a9db8-738c-4a95-87ed-3037d5b6ddf5-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:06 crc kubenswrapper[4728]: I1205 11:31:06.244711 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dnpl6\" (UniqueName: \"kubernetes.io/projected/296a9db8-738c-4a95-87ed-3037d5b6ddf5-kube-api-access-dnpl6\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:06 crc kubenswrapper[4728]: I1205 11:31:06.245415 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/296a9db8-738c-4a95-87ed-3037d5b6ddf5-config" (OuterVolumeSpecName: "config") pod "296a9db8-738c-4a95-87ed-3037d5b6ddf5" (UID: "296a9db8-738c-4a95-87ed-3037d5b6ddf5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:31:06 crc kubenswrapper[4728]: I1205 11:31:06.249053 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/296a9db8-738c-4a95-87ed-3037d5b6ddf5-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "296a9db8-738c-4a95-87ed-3037d5b6ddf5" (UID: "296a9db8-738c-4a95-87ed-3037d5b6ddf5"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:31:06 crc kubenswrapper[4728]: I1205 11:31:06.256940 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/296a9db8-738c-4a95-87ed-3037d5b6ddf5-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "296a9db8-738c-4a95-87ed-3037d5b6ddf5" (UID: "296a9db8-738c-4a95-87ed-3037d5b6ddf5"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:31:06 crc kubenswrapper[4728]: I1205 11:31:06.346708 4728 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/296a9db8-738c-4a95-87ed-3037d5b6ddf5-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:06 crc kubenswrapper[4728]: I1205 11:31:06.346755 4728 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/296a9db8-738c-4a95-87ed-3037d5b6ddf5-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:06 crc kubenswrapper[4728]: I1205 11:31:06.346769 4728 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/296a9db8-738c-4a95-87ed-3037d5b6ddf5-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.045388 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cb320e31-3c04-4469-9a75-52d80531280c","Type":"ContainerStarted","Data":"ba0e1550419d0ad8638d939916fc5268dafc091875642fb5deef4cc8aeea6155"} Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.045711 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cb320e31-3c04-4469-9a75-52d80531280c","Type":"ContainerStarted","Data":"6bdf1f8aa61fced34f79c921d6bf92d1d06ad01eb123b8fa4749006c5677bbf6"} Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.049205 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c9776ccc5-xxm6x" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.049259 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"6860f6fe-8127-4cbd-af2d-7e5e0e4ed001","Type":"ContainerStarted","Data":"d029d710098c99b40fd39b09c5868b72936ecc646179acc55ce77aeda6f3dbf4"} Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.086353 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=3.1168907790000002 podStartE2EDuration="24.086328934s" podCreationTimestamp="2025-12-05 11:30:43 +0000 UTC" firstStartedPulling="2025-12-05 11:30:44.860315355 +0000 UTC m=+1379.002438038" lastFinishedPulling="2025-12-05 11:31:05.82975349 +0000 UTC m=+1399.971876193" observedRunningTime="2025-12-05 11:31:07.071979542 +0000 UTC m=+1401.214102235" watchObservedRunningTime="2025-12-05 11:31:07.086328934 +0000 UTC m=+1401.228451627" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.181474 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-xxm6x"] Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.197075 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-xxm6x"] Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.486363 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-z59x7"] Dec 05 11:31:07 crc kubenswrapper[4728]: E1205 11:31:07.486869 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="296a9db8-738c-4a95-87ed-3037d5b6ddf5" containerName="init" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.486904 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="296a9db8-738c-4a95-87ed-3037d5b6ddf5" containerName="init" Dec 05 11:31:07 crc kubenswrapper[4728]: E1205 11:31:07.486924 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="296a9db8-738c-4a95-87ed-3037d5b6ddf5" containerName="dnsmasq-dns" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.486932 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="296a9db8-738c-4a95-87ed-3037d5b6ddf5" containerName="dnsmasq-dns" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.487182 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="296a9db8-738c-4a95-87ed-3037d5b6ddf5" containerName="dnsmasq-dns" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.488303 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-z59x7" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.499450 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-z59x7"] Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.580100 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-1044-account-create-update-drt9p"] Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.581895 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-1044-account-create-update-drt9p" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.585978 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.588922 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-wqpdw"] Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.590159 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-wqpdw" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.597261 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-wqpdw"] Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.616728 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-1044-account-create-update-drt9p"] Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.625917 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5d7bdb6c68-cfbgd" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.676807 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ba6ad75b-eef0-4bd1-b008-0661eabd1bc4-operator-scripts\") pod \"nova-api-db-create-z59x7\" (UID: \"ba6ad75b-eef0-4bd1-b008-0661eabd1bc4\") " pod="openstack/nova-api-db-create-z59x7" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.676979 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vmfq8\" (UniqueName: \"kubernetes.io/projected/ba6ad75b-eef0-4bd1-b008-0661eabd1bc4-kube-api-access-vmfq8\") pod \"nova-api-db-create-z59x7\" (UID: \"ba6ad75b-eef0-4bd1-b008-0661eabd1bc4\") " pod="openstack/nova-api-db-create-z59x7" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.699912 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-vs4fm"] Dec 05 11:31:07 crc kubenswrapper[4728]: E1205 11:31:07.700925 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c54bdd7-0427-403e-a33d-2d52ec56a7fc" containerName="horizon" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.700944 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c54bdd7-0427-403e-a33d-2d52ec56a7fc" containerName="horizon" Dec 05 11:31:07 crc kubenswrapper[4728]: E1205 11:31:07.700975 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c54bdd7-0427-403e-a33d-2d52ec56a7fc" containerName="horizon-log" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.700982 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c54bdd7-0427-403e-a33d-2d52ec56a7fc" containerName="horizon-log" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.701413 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c54bdd7-0427-403e-a33d-2d52ec56a7fc" containerName="horizon-log" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.701423 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c54bdd7-0427-403e-a33d-2d52ec56a7fc" containerName="horizon" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.702341 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-vs4fm" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.744324 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-vs4fm"] Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.779873 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-5bbb-account-create-update-2xrjf"] Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.780349 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ctnkh\" (UniqueName: \"kubernetes.io/projected/7c54bdd7-0427-403e-a33d-2d52ec56a7fc-kube-api-access-ctnkh\") pod \"7c54bdd7-0427-403e-a33d-2d52ec56a7fc\" (UID: \"7c54bdd7-0427-403e-a33d-2d52ec56a7fc\") " Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.780472 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/7c54bdd7-0427-403e-a33d-2d52ec56a7fc-horizon-tls-certs\") pod \"7c54bdd7-0427-403e-a33d-2d52ec56a7fc\" (UID: \"7c54bdd7-0427-403e-a33d-2d52ec56a7fc\") " Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.780507 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c54bdd7-0427-403e-a33d-2d52ec56a7fc-combined-ca-bundle\") pod \"7c54bdd7-0427-403e-a33d-2d52ec56a7fc\" (UID: \"7c54bdd7-0427-403e-a33d-2d52ec56a7fc\") " Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.780605 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7c54bdd7-0427-403e-a33d-2d52ec56a7fc-scripts\") pod \"7c54bdd7-0427-403e-a33d-2d52ec56a7fc\" (UID: \"7c54bdd7-0427-403e-a33d-2d52ec56a7fc\") " Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.780686 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/7c54bdd7-0427-403e-a33d-2d52ec56a7fc-horizon-secret-key\") pod \"7c54bdd7-0427-403e-a33d-2d52ec56a7fc\" (UID: \"7c54bdd7-0427-403e-a33d-2d52ec56a7fc\") " Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.780734 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7c54bdd7-0427-403e-a33d-2d52ec56a7fc-config-data\") pod \"7c54bdd7-0427-403e-a33d-2d52ec56a7fc\" (UID: \"7c54bdd7-0427-403e-a33d-2d52ec56a7fc\") " Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.780755 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7c54bdd7-0427-403e-a33d-2d52ec56a7fc-logs\") pod \"7c54bdd7-0427-403e-a33d-2d52ec56a7fc\" (UID: \"7c54bdd7-0427-403e-a33d-2d52ec56a7fc\") " Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.781076 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-5bbb-account-create-update-2xrjf" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.781478 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7c54bdd7-0427-403e-a33d-2d52ec56a7fc-logs" (OuterVolumeSpecName: "logs") pod "7c54bdd7-0427-403e-a33d-2d52ec56a7fc" (UID: "7c54bdd7-0427-403e-a33d-2d52ec56a7fc"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.784315 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-55sbb\" (UniqueName: \"kubernetes.io/projected/29402829-a7a8-4c70-b9d4-b0301b97ab76-kube-api-access-55sbb\") pod \"nova-cell0-db-create-wqpdw\" (UID: \"29402829-a7a8-4c70-b9d4-b0301b97ab76\") " pod="openstack/nova-cell0-db-create-wqpdw" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.784331 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.784535 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ba6ad75b-eef0-4bd1-b008-0661eabd1bc4-operator-scripts\") pod \"nova-api-db-create-z59x7\" (UID: \"ba6ad75b-eef0-4bd1-b008-0661eabd1bc4\") " pod="openstack/nova-api-db-create-z59x7" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.784582 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gdqt7\" (UniqueName: \"kubernetes.io/projected/29d916ac-a71a-454f-bc05-39b8426b4e64-kube-api-access-gdqt7\") pod \"nova-api-1044-account-create-update-drt9p\" (UID: \"29d916ac-a71a-454f-bc05-39b8426b4e64\") " pod="openstack/nova-api-1044-account-create-update-drt9p" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.784613 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/29402829-a7a8-4c70-b9d4-b0301b97ab76-operator-scripts\") pod \"nova-cell0-db-create-wqpdw\" (UID: \"29402829-a7a8-4c70-b9d4-b0301b97ab76\") " pod="openstack/nova-cell0-db-create-wqpdw" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.784699 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cf847146-79d3-4259-9e19-f78f94b25dfa-operator-scripts\") pod \"nova-cell1-db-create-vs4fm\" (UID: \"cf847146-79d3-4259-9e19-f78f94b25dfa\") " pod="openstack/nova-cell1-db-create-vs4fm" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.784781 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4974j\" (UniqueName: \"kubernetes.io/projected/cf847146-79d3-4259-9e19-f78f94b25dfa-kube-api-access-4974j\") pod \"nova-cell1-db-create-vs4fm\" (UID: \"cf847146-79d3-4259-9e19-f78f94b25dfa\") " pod="openstack/nova-cell1-db-create-vs4fm" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.784839 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/29d916ac-a71a-454f-bc05-39b8426b4e64-operator-scripts\") pod \"nova-api-1044-account-create-update-drt9p\" (UID: \"29d916ac-a71a-454f-bc05-39b8426b4e64\") " pod="openstack/nova-api-1044-account-create-update-drt9p" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.784904 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vmfq8\" (UniqueName: \"kubernetes.io/projected/ba6ad75b-eef0-4bd1-b008-0661eabd1bc4-kube-api-access-vmfq8\") pod \"nova-api-db-create-z59x7\" (UID: \"ba6ad75b-eef0-4bd1-b008-0661eabd1bc4\") " pod="openstack/nova-api-db-create-z59x7" Dec 05 11:31:07 
crc kubenswrapper[4728]: I1205 11:31:07.784958 4728 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7c54bdd7-0427-403e-a33d-2d52ec56a7fc-logs\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.785855 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ba6ad75b-eef0-4bd1-b008-0661eabd1bc4-operator-scripts\") pod \"nova-api-db-create-z59x7\" (UID: \"ba6ad75b-eef0-4bd1-b008-0661eabd1bc4\") " pod="openstack/nova-api-db-create-z59x7" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.805014 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c54bdd7-0427-403e-a33d-2d52ec56a7fc-kube-api-access-ctnkh" (OuterVolumeSpecName: "kube-api-access-ctnkh") pod "7c54bdd7-0427-403e-a33d-2d52ec56a7fc" (UID: "7c54bdd7-0427-403e-a33d-2d52ec56a7fc"). InnerVolumeSpecName "kube-api-access-ctnkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.807932 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c54bdd7-0427-403e-a33d-2d52ec56a7fc-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "7c54bdd7-0427-403e-a33d-2d52ec56a7fc" (UID: "7c54bdd7-0427-403e-a33d-2d52ec56a7fc"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.810255 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vmfq8\" (UniqueName: \"kubernetes.io/projected/ba6ad75b-eef0-4bd1-b008-0661eabd1bc4-kube-api-access-vmfq8\") pod \"nova-api-db-create-z59x7\" (UID: \"ba6ad75b-eef0-4bd1-b008-0661eabd1bc4\") " pod="openstack/nova-api-db-create-z59x7" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.811704 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7c54bdd7-0427-403e-a33d-2d52ec56a7fc-scripts" (OuterVolumeSpecName: "scripts") pod "7c54bdd7-0427-403e-a33d-2d52ec56a7fc" (UID: "7c54bdd7-0427-403e-a33d-2d52ec56a7fc"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.823233 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-5bbb-account-create-update-2xrjf"] Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.824064 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7c54bdd7-0427-403e-a33d-2d52ec56a7fc-config-data" (OuterVolumeSpecName: "config-data") pod "7c54bdd7-0427-403e-a33d-2d52ec56a7fc" (UID: "7c54bdd7-0427-403e-a33d-2d52ec56a7fc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.824895 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c54bdd7-0427-403e-a33d-2d52ec56a7fc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7c54bdd7-0427-403e-a33d-2d52ec56a7fc" (UID: "7c54bdd7-0427-403e-a33d-2d52ec56a7fc"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.858195 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c54bdd7-0427-403e-a33d-2d52ec56a7fc-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "7c54bdd7-0427-403e-a33d-2d52ec56a7fc" (UID: "7c54bdd7-0427-403e-a33d-2d52ec56a7fc"). InnerVolumeSpecName "horizon-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.887441 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4974j\" (UniqueName: \"kubernetes.io/projected/cf847146-79d3-4259-9e19-f78f94b25dfa-kube-api-access-4974j\") pod \"nova-cell1-db-create-vs4fm\" (UID: \"cf847146-79d3-4259-9e19-f78f94b25dfa\") " pod="openstack/nova-cell1-db-create-vs4fm" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.887515 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/29d916ac-a71a-454f-bc05-39b8426b4e64-operator-scripts\") pod \"nova-api-1044-account-create-update-drt9p\" (UID: \"29d916ac-a71a-454f-bc05-39b8426b4e64\") " pod="openstack/nova-api-1044-account-create-update-drt9p" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.887590 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-55sbb\" (UniqueName: \"kubernetes.io/projected/29402829-a7a8-4c70-b9d4-b0301b97ab76-kube-api-access-55sbb\") pod \"nova-cell0-db-create-wqpdw\" (UID: \"29402829-a7a8-4c70-b9d4-b0301b97ab76\") " pod="openstack/nova-cell0-db-create-wqpdw" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.887620 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/59f02d46-5d39-4908-bde8-f957fd7eb940-operator-scripts\") pod \"nova-cell0-5bbb-account-create-update-2xrjf\" (UID: \"59f02d46-5d39-4908-bde8-f957fd7eb940\") " pod="openstack/nova-cell0-5bbb-account-create-update-2xrjf" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.887652 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jphxn\" (UniqueName: \"kubernetes.io/projected/59f02d46-5d39-4908-bde8-f957fd7eb940-kube-api-access-jphxn\") pod \"nova-cell0-5bbb-account-create-update-2xrjf\" (UID: \"59f02d46-5d39-4908-bde8-f957fd7eb940\") " pod="openstack/nova-cell0-5bbb-account-create-update-2xrjf" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.887773 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gdqt7\" (UniqueName: \"kubernetes.io/projected/29d916ac-a71a-454f-bc05-39b8426b4e64-kube-api-access-gdqt7\") pod \"nova-api-1044-account-create-update-drt9p\" (UID: \"29d916ac-a71a-454f-bc05-39b8426b4e64\") " pod="openstack/nova-api-1044-account-create-update-drt9p" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.887827 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/29402829-a7a8-4c70-b9d4-b0301b97ab76-operator-scripts\") pod \"nova-cell0-db-create-wqpdw\" (UID: \"29402829-a7a8-4c70-b9d4-b0301b97ab76\") " pod="openstack/nova-cell0-db-create-wqpdw" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.887944 4728 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cf847146-79d3-4259-9e19-f78f94b25dfa-operator-scripts\") pod \"nova-cell1-db-create-vs4fm\" (UID: \"cf847146-79d3-4259-9e19-f78f94b25dfa\") " pod="openstack/nova-cell1-db-create-vs4fm" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.888446 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ctnkh\" (UniqueName: \"kubernetes.io/projected/7c54bdd7-0427-403e-a33d-2d52ec56a7fc-kube-api-access-ctnkh\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.888477 4728 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/7c54bdd7-0427-403e-a33d-2d52ec56a7fc-horizon-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.888496 4728 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c54bdd7-0427-403e-a33d-2d52ec56a7fc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.888515 4728 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7c54bdd7-0427-403e-a33d-2d52ec56a7fc-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.888530 4728 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/7c54bdd7-0427-403e-a33d-2d52ec56a7fc-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.888546 4728 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7c54bdd7-0427-403e-a33d-2d52ec56a7fc-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.888865 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/29402829-a7a8-4c70-b9d4-b0301b97ab76-operator-scripts\") pod \"nova-cell0-db-create-wqpdw\" (UID: \"29402829-a7a8-4c70-b9d4-b0301b97ab76\") " pod="openstack/nova-cell0-db-create-wqpdw" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.888890 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cf847146-79d3-4259-9e19-f78f94b25dfa-operator-scripts\") pod \"nova-cell1-db-create-vs4fm\" (UID: \"cf847146-79d3-4259-9e19-f78f94b25dfa\") " pod="openstack/nova-cell1-db-create-vs4fm" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.889448 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/29d916ac-a71a-454f-bc05-39b8426b4e64-operator-scripts\") pod \"nova-api-1044-account-create-update-drt9p\" (UID: \"29d916ac-a71a-454f-bc05-39b8426b4e64\") " pod="openstack/nova-api-1044-account-create-update-drt9p" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.907877 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gdqt7\" (UniqueName: \"kubernetes.io/projected/29d916ac-a71a-454f-bc05-39b8426b4e64-kube-api-access-gdqt7\") pod \"nova-api-1044-account-create-update-drt9p\" (UID: \"29d916ac-a71a-454f-bc05-39b8426b4e64\") " pod="openstack/nova-api-1044-account-create-update-drt9p" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.910376 4728 util.go:30] 
"No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-z59x7" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.910541 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4974j\" (UniqueName: \"kubernetes.io/projected/cf847146-79d3-4259-9e19-f78f94b25dfa-kube-api-access-4974j\") pod \"nova-cell1-db-create-vs4fm\" (UID: \"cf847146-79d3-4259-9e19-f78f94b25dfa\") " pod="openstack/nova-cell1-db-create-vs4fm" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.912122 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-55sbb\" (UniqueName: \"kubernetes.io/projected/29402829-a7a8-4c70-b9d4-b0301b97ab76-kube-api-access-55sbb\") pod \"nova-cell0-db-create-wqpdw\" (UID: \"29402829-a7a8-4c70-b9d4-b0301b97ab76\") " pod="openstack/nova-cell0-db-create-wqpdw" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.938371 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-1044-account-create-update-drt9p" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.961123 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-wqpdw" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.988712 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-279f-account-create-update-mzbwz"] Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.990827 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-279f-account-create-update-mzbwz" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.992030 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/59f02d46-5d39-4908-bde8-f957fd7eb940-operator-scripts\") pod \"nova-cell0-5bbb-account-create-update-2xrjf\" (UID: \"59f02d46-5d39-4908-bde8-f957fd7eb940\") " pod="openstack/nova-cell0-5bbb-account-create-update-2xrjf" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.994984 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jphxn\" (UniqueName: \"kubernetes.io/projected/59f02d46-5d39-4908-bde8-f957fd7eb940-kube-api-access-jphxn\") pod \"nova-cell0-5bbb-account-create-update-2xrjf\" (UID: \"59f02d46-5d39-4908-bde8-f957fd7eb940\") " pod="openstack/nova-cell0-5bbb-account-create-update-2xrjf" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.996461 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/59f02d46-5d39-4908-bde8-f957fd7eb940-operator-scripts\") pod \"nova-cell0-5bbb-account-create-update-2xrjf\" (UID: \"59f02d46-5d39-4908-bde8-f957fd7eb940\") " pod="openstack/nova-cell0-5bbb-account-create-update-2xrjf" Dec 05 11:31:07 crc kubenswrapper[4728]: I1205 11:31:07.997539 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Dec 05 11:31:08 crc kubenswrapper[4728]: I1205 11:31:08.018359 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jphxn\" (UniqueName: \"kubernetes.io/projected/59f02d46-5d39-4908-bde8-f957fd7eb940-kube-api-access-jphxn\") pod \"nova-cell0-5bbb-account-create-update-2xrjf\" (UID: \"59f02d46-5d39-4908-bde8-f957fd7eb940\") " pod="openstack/nova-cell0-5bbb-account-create-update-2xrjf" Dec 05 11:31:08 crc kubenswrapper[4728]: I1205 
11:31:08.022348 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-279f-account-create-update-mzbwz"] Dec 05 11:31:08 crc kubenswrapper[4728]: I1205 11:31:08.032773 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-vs4fm" Dec 05 11:31:08 crc kubenswrapper[4728]: I1205 11:31:08.034325 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-5bbb-account-create-update-2xrjf" Dec 05 11:31:08 crc kubenswrapper[4728]: I1205 11:31:08.060718 4728 generic.go:334] "Generic (PLEG): container finished" podID="7c54bdd7-0427-403e-a33d-2d52ec56a7fc" containerID="854c78ab2e7ce12e906b1b302c9941632db6e57660fe1ac6ea8724721bd7b054" exitCode=137 Dec 05 11:31:08 crc kubenswrapper[4728]: I1205 11:31:08.061602 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5d7bdb6c68-cfbgd" Dec 05 11:31:08 crc kubenswrapper[4728]: I1205 11:31:08.062208 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5d7bdb6c68-cfbgd" event={"ID":"7c54bdd7-0427-403e-a33d-2d52ec56a7fc","Type":"ContainerDied","Data":"854c78ab2e7ce12e906b1b302c9941632db6e57660fe1ac6ea8724721bd7b054"} Dec 05 11:31:08 crc kubenswrapper[4728]: I1205 11:31:08.062267 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5d7bdb6c68-cfbgd" event={"ID":"7c54bdd7-0427-403e-a33d-2d52ec56a7fc","Type":"ContainerDied","Data":"26204862c5bf40d04605923ab2aea195411bd311bb0d38fc57ec4f8156e1de7d"} Dec 05 11:31:08 crc kubenswrapper[4728]: I1205 11:31:08.062289 4728 scope.go:117] "RemoveContainer" containerID="bf564e7b42883be7caec87b4b0532d5f01da418814c2ceb4858093c4bba6b4a8" Dec 05 11:31:08 crc kubenswrapper[4728]: I1205 11:31:08.098613 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ls4t9\" (UniqueName: \"kubernetes.io/projected/0c0925d7-2534-4aa8-8d4b-8dee7ab50f8d-kube-api-access-ls4t9\") pod \"nova-cell1-279f-account-create-update-mzbwz\" (UID: \"0c0925d7-2534-4aa8-8d4b-8dee7ab50f8d\") " pod="openstack/nova-cell1-279f-account-create-update-mzbwz" Dec 05 11:31:08 crc kubenswrapper[4728]: I1205 11:31:08.098681 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0c0925d7-2534-4aa8-8d4b-8dee7ab50f8d-operator-scripts\") pod \"nova-cell1-279f-account-create-update-mzbwz\" (UID: \"0c0925d7-2534-4aa8-8d4b-8dee7ab50f8d\") " pod="openstack/nova-cell1-279f-account-create-update-mzbwz" Dec 05 11:31:08 crc kubenswrapper[4728]: I1205 11:31:08.132924 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-5d7bdb6c68-cfbgd"] Dec 05 11:31:08 crc kubenswrapper[4728]: I1205 11:31:08.147256 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-5d7bdb6c68-cfbgd"] Dec 05 11:31:08 crc kubenswrapper[4728]: I1205 11:31:08.201227 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ls4t9\" (UniqueName: \"kubernetes.io/projected/0c0925d7-2534-4aa8-8d4b-8dee7ab50f8d-kube-api-access-ls4t9\") pod \"nova-cell1-279f-account-create-update-mzbwz\" (UID: \"0c0925d7-2534-4aa8-8d4b-8dee7ab50f8d\") " pod="openstack/nova-cell1-279f-account-create-update-mzbwz" Dec 05 11:31:08 crc kubenswrapper[4728]: I1205 11:31:08.201557 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0c0925d7-2534-4aa8-8d4b-8dee7ab50f8d-operator-scripts\") pod \"nova-cell1-279f-account-create-update-mzbwz\" (UID: \"0c0925d7-2534-4aa8-8d4b-8dee7ab50f8d\") " pod="openstack/nova-cell1-279f-account-create-update-mzbwz" Dec 05 11:31:08 crc kubenswrapper[4728]: I1205 11:31:08.202256 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0c0925d7-2534-4aa8-8d4b-8dee7ab50f8d-operator-scripts\") pod \"nova-cell1-279f-account-create-update-mzbwz\" (UID: \"0c0925d7-2534-4aa8-8d4b-8dee7ab50f8d\") " pod="openstack/nova-cell1-279f-account-create-update-mzbwz" Dec 05 11:31:08 crc kubenswrapper[4728]: I1205 11:31:08.229922 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ls4t9\" (UniqueName: \"kubernetes.io/projected/0c0925d7-2534-4aa8-8d4b-8dee7ab50f8d-kube-api-access-ls4t9\") pod \"nova-cell1-279f-account-create-update-mzbwz\" (UID: \"0c0925d7-2534-4aa8-8d4b-8dee7ab50f8d\") " pod="openstack/nova-cell1-279f-account-create-update-mzbwz" Dec 05 11:31:08 crc kubenswrapper[4728]: I1205 11:31:08.343405 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-279f-account-create-update-mzbwz" Dec 05 11:31:08 crc kubenswrapper[4728]: I1205 11:31:08.370809 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="296a9db8-738c-4a95-87ed-3037d5b6ddf5" path="/var/lib/kubelet/pods/296a9db8-738c-4a95-87ed-3037d5b6ddf5/volumes" Dec 05 11:31:08 crc kubenswrapper[4728]: I1205 11:31:08.371598 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7c54bdd7-0427-403e-a33d-2d52ec56a7fc" path="/var/lib/kubelet/pods/7c54bdd7-0427-403e-a33d-2d52ec56a7fc/volumes" Dec 05 11:31:08 crc kubenswrapper[4728]: I1205 11:31:08.376088 4728 scope.go:117] "RemoveContainer" containerID="854c78ab2e7ce12e906b1b302c9941632db6e57660fe1ac6ea8724721bd7b054" Dec 05 11:31:08 crc kubenswrapper[4728]: E1205 11:31:08.377443 4728 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7c54bdd7_0427_403e_a33d_2d52ec56a7fc.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7c54bdd7_0427_403e_a33d_2d52ec56a7fc.slice/crio-26204862c5bf40d04605923ab2aea195411bd311bb0d38fc57ec4f8156e1de7d\": RecentStats: unable to find data in memory cache]" Dec 05 11:31:08 crc kubenswrapper[4728]: I1205 11:31:08.439067 4728 scope.go:117] "RemoveContainer" containerID="bf564e7b42883be7caec87b4b0532d5f01da418814c2ceb4858093c4bba6b4a8" Dec 05 11:31:08 crc kubenswrapper[4728]: E1205 11:31:08.439718 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bf564e7b42883be7caec87b4b0532d5f01da418814c2ceb4858093c4bba6b4a8\": container with ID starting with bf564e7b42883be7caec87b4b0532d5f01da418814c2ceb4858093c4bba6b4a8 not found: ID does not exist" containerID="bf564e7b42883be7caec87b4b0532d5f01da418814c2ceb4858093c4bba6b4a8" Dec 05 11:31:08 crc kubenswrapper[4728]: I1205 11:31:08.439756 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf564e7b42883be7caec87b4b0532d5f01da418814c2ceb4858093c4bba6b4a8"} err="failed to get container status \"bf564e7b42883be7caec87b4b0532d5f01da418814c2ceb4858093c4bba6b4a8\": 
rpc error: code = NotFound desc = could not find container \"bf564e7b42883be7caec87b4b0532d5f01da418814c2ceb4858093c4bba6b4a8\": container with ID starting with bf564e7b42883be7caec87b4b0532d5f01da418814c2ceb4858093c4bba6b4a8 not found: ID does not exist" Dec 05 11:31:08 crc kubenswrapper[4728]: I1205 11:31:08.439787 4728 scope.go:117] "RemoveContainer" containerID="854c78ab2e7ce12e906b1b302c9941632db6e57660fe1ac6ea8724721bd7b054" Dec 05 11:31:08 crc kubenswrapper[4728]: E1205 11:31:08.440052 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"854c78ab2e7ce12e906b1b302c9941632db6e57660fe1ac6ea8724721bd7b054\": container with ID starting with 854c78ab2e7ce12e906b1b302c9941632db6e57660fe1ac6ea8724721bd7b054 not found: ID does not exist" containerID="854c78ab2e7ce12e906b1b302c9941632db6e57660fe1ac6ea8724721bd7b054" Dec 05 11:31:08 crc kubenswrapper[4728]: I1205 11:31:08.440081 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"854c78ab2e7ce12e906b1b302c9941632db6e57660fe1ac6ea8724721bd7b054"} err="failed to get container status \"854c78ab2e7ce12e906b1b302c9941632db6e57660fe1ac6ea8724721bd7b054\": rpc error: code = NotFound desc = could not find container \"854c78ab2e7ce12e906b1b302c9941632db6e57660fe1ac6ea8724721bd7b054\": container with ID starting with 854c78ab2e7ce12e906b1b302c9941632db6e57660fe1ac6ea8724721bd7b054 not found: ID does not exist" Dec 05 11:31:08 crc kubenswrapper[4728]: I1205 11:31:08.600104 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-wqpdw"] Dec 05 11:31:08 crc kubenswrapper[4728]: I1205 11:31:08.701598 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-z59x7"] Dec 05 11:31:08 crc kubenswrapper[4728]: I1205 11:31:08.800705 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-vs4fm"] Dec 05 11:31:08 crc kubenswrapper[4728]: W1205 11:31:08.806368 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcf847146_79d3_4259_9e19_f78f94b25dfa.slice/crio-be136d96d1d270a6b65e9424be1a065502e57205d2285c481f32eef65148d7a6 WatchSource:0}: Error finding container be136d96d1d270a6b65e9424be1a065502e57205d2285c481f32eef65148d7a6: Status 404 returned error can't find the container with id be136d96d1d270a6b65e9424be1a065502e57205d2285c481f32eef65148d7a6 Dec 05 11:31:08 crc kubenswrapper[4728]: I1205 11:31:08.822137 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-5bbb-account-create-update-2xrjf"] Dec 05 11:31:08 crc kubenswrapper[4728]: W1205 11:31:08.839331 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod59f02d46_5d39_4908_bde8_f957fd7eb940.slice/crio-f785e9b1925df594818081e7f9faac277ccf63988e2559cdb71874dccaa3a961 WatchSource:0}: Error finding container f785e9b1925df594818081e7f9faac277ccf63988e2559cdb71874dccaa3a961: Status 404 returned error can't find the container with id f785e9b1925df594818081e7f9faac277ccf63988e2559cdb71874dccaa3a961 Dec 05 11:31:08 crc kubenswrapper[4728]: I1205 11:31:08.863710 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-1044-account-create-update-drt9p"] Dec 05 11:31:08 crc kubenswrapper[4728]: W1205 11:31:08.868116 4728 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod29d916ac_a71a_454f_bc05_39b8426b4e64.slice/crio-d452f8f9d10f3946cd34f15ee9f281fa71c7dd3b23614bd6362ced8cac5d5562 WatchSource:0}: Error finding container d452f8f9d10f3946cd34f15ee9f281fa71c7dd3b23614bd6362ced8cac5d5562: Status 404 returned error can't find the container with id d452f8f9d10f3946cd34f15ee9f281fa71c7dd3b23614bd6362ced8cac5d5562 Dec 05 11:31:09 crc kubenswrapper[4728]: I1205 11:31:09.076317 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-279f-account-create-update-mzbwz"] Dec 05 11:31:09 crc kubenswrapper[4728]: I1205 11:31:09.082658 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-5bbb-account-create-update-2xrjf" event={"ID":"59f02d46-5d39-4908-bde8-f957fd7eb940","Type":"ContainerStarted","Data":"f785e9b1925df594818081e7f9faac277ccf63988e2559cdb71874dccaa3a961"} Dec 05 11:31:09 crc kubenswrapper[4728]: I1205 11:31:09.090281 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-1044-account-create-update-drt9p" event={"ID":"29d916ac-a71a-454f-bc05-39b8426b4e64","Type":"ContainerStarted","Data":"d452f8f9d10f3946cd34f15ee9f281fa71c7dd3b23614bd6362ced8cac5d5562"} Dec 05 11:31:09 crc kubenswrapper[4728]: I1205 11:31:09.101457 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cb320e31-3c04-4469-9a75-52d80531280c","Type":"ContainerStarted","Data":"c21f3c10efc7d4b5d94d1ad593f5799b1933718b0f181b39473dd0b28e01d0b8"} Dec 05 11:31:09 crc kubenswrapper[4728]: I1205 11:31:09.101724 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="cb320e31-3c04-4469-9a75-52d80531280c" containerName="ceilometer-central-agent" containerID="cri-o://9440238024f25c719b0302e9a220ff640cc3848879c619428dd47599daed16e1" gracePeriod=30 Dec 05 11:31:09 crc kubenswrapper[4728]: I1205 11:31:09.101893 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 11:31:09 crc kubenswrapper[4728]: I1205 11:31:09.101998 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="cb320e31-3c04-4469-9a75-52d80531280c" containerName="proxy-httpd" containerID="cri-o://c21f3c10efc7d4b5d94d1ad593f5799b1933718b0f181b39473dd0b28e01d0b8" gracePeriod=30 Dec 05 11:31:09 crc kubenswrapper[4728]: I1205 11:31:09.102049 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="cb320e31-3c04-4469-9a75-52d80531280c" containerName="ceilometer-notification-agent" containerID="cri-o://6bdf1f8aa61fced34f79c921d6bf92d1d06ad01eb123b8fa4749006c5677bbf6" gracePeriod=30 Dec 05 11:31:09 crc kubenswrapper[4728]: I1205 11:31:09.102352 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="cb320e31-3c04-4469-9a75-52d80531280c" containerName="sg-core" containerID="cri-o://ba0e1550419d0ad8638d939916fc5268dafc091875642fb5deef4cc8aeea6155" gracePeriod=30 Dec 05 11:31:09 crc kubenswrapper[4728]: I1205 11:31:09.109315 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-wqpdw" event={"ID":"29402829-a7a8-4c70-b9d4-b0301b97ab76","Type":"ContainerStarted","Data":"21cd52edcf368b3b529b286788745601d93b95e413f147a661c2a85aade0ac18"} Dec 05 11:31:09 crc kubenswrapper[4728]: I1205 11:31:09.109370 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-cell0-db-create-wqpdw" event={"ID":"29402829-a7a8-4c70-b9d4-b0301b97ab76","Type":"ContainerStarted","Data":"5d9209d828c466ab39972bc8b6728b96ed5bc3b5515aabd96fb0951f54bd6448"} Dec 05 11:31:09 crc kubenswrapper[4728]: I1205 11:31:09.116933 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-z59x7" event={"ID":"ba6ad75b-eef0-4bd1-b008-0661eabd1bc4","Type":"ContainerStarted","Data":"2c928cf4b635914173e5bb2619fe60ff3885c366f6b01c555b7f1b52cda455cb"} Dec 05 11:31:09 crc kubenswrapper[4728]: I1205 11:31:09.119437 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-vs4fm" event={"ID":"cf847146-79d3-4259-9e19-f78f94b25dfa","Type":"ContainerStarted","Data":"be136d96d1d270a6b65e9424be1a065502e57205d2285c481f32eef65148d7a6"} Dec 05 11:31:09 crc kubenswrapper[4728]: I1205 11:31:09.142429 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.822955872 podStartE2EDuration="17.142409912s" podCreationTimestamp="2025-12-05 11:30:52 +0000 UTC" firstStartedPulling="2025-12-05 11:30:54.163319429 +0000 UTC m=+1388.305442132" lastFinishedPulling="2025-12-05 11:31:08.482773479 +0000 UTC m=+1402.624896172" observedRunningTime="2025-12-05 11:31:09.132282836 +0000 UTC m=+1403.274405539" watchObservedRunningTime="2025-12-05 11:31:09.142409912 +0000 UTC m=+1403.284532605" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.001236 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.046481 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb320e31-3c04-4469-9a75-52d80531280c-config-data\") pod \"cb320e31-3c04-4469-9a75-52d80531280c\" (UID: \"cb320e31-3c04-4469-9a75-52d80531280c\") " Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.046568 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cb320e31-3c04-4469-9a75-52d80531280c-log-httpd\") pod \"cb320e31-3c04-4469-9a75-52d80531280c\" (UID: \"cb320e31-3c04-4469-9a75-52d80531280c\") " Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.046621 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cb320e31-3c04-4469-9a75-52d80531280c-sg-core-conf-yaml\") pod \"cb320e31-3c04-4469-9a75-52d80531280c\" (UID: \"cb320e31-3c04-4469-9a75-52d80531280c\") " Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.046682 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cb320e31-3c04-4469-9a75-52d80531280c-scripts\") pod \"cb320e31-3c04-4469-9a75-52d80531280c\" (UID: \"cb320e31-3c04-4469-9a75-52d80531280c\") " Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.046739 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb320e31-3c04-4469-9a75-52d80531280c-combined-ca-bundle\") pod \"cb320e31-3c04-4469-9a75-52d80531280c\" (UID: \"cb320e31-3c04-4469-9a75-52d80531280c\") " Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.046764 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/cb320e31-3c04-4469-9a75-52d80531280c-run-httpd\") pod \"cb320e31-3c04-4469-9a75-52d80531280c\" (UID: \"cb320e31-3c04-4469-9a75-52d80531280c\") " Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.046941 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-58kc5\" (UniqueName: \"kubernetes.io/projected/cb320e31-3c04-4469-9a75-52d80531280c-kube-api-access-58kc5\") pod \"cb320e31-3c04-4469-9a75-52d80531280c\" (UID: \"cb320e31-3c04-4469-9a75-52d80531280c\") " Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.047521 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cb320e31-3c04-4469-9a75-52d80531280c-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "cb320e31-3c04-4469-9a75-52d80531280c" (UID: "cb320e31-3c04-4469-9a75-52d80531280c"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.047745 4728 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cb320e31-3c04-4469-9a75-52d80531280c-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.048099 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cb320e31-3c04-4469-9a75-52d80531280c-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "cb320e31-3c04-4469-9a75-52d80531280c" (UID: "cb320e31-3c04-4469-9a75-52d80531280c"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.053027 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb320e31-3c04-4469-9a75-52d80531280c-scripts" (OuterVolumeSpecName: "scripts") pod "cb320e31-3c04-4469-9a75-52d80531280c" (UID: "cb320e31-3c04-4469-9a75-52d80531280c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.053458 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cb320e31-3c04-4469-9a75-52d80531280c-kube-api-access-58kc5" (OuterVolumeSpecName: "kube-api-access-58kc5") pod "cb320e31-3c04-4469-9a75-52d80531280c" (UID: "cb320e31-3c04-4469-9a75-52d80531280c"). InnerVolumeSpecName "kube-api-access-58kc5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.085663 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb320e31-3c04-4469-9a75-52d80531280c-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "cb320e31-3c04-4469-9a75-52d80531280c" (UID: "cb320e31-3c04-4469-9a75-52d80531280c"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.113727 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-scheduler-0" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.150133 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-58kc5\" (UniqueName: \"kubernetes.io/projected/cb320e31-3c04-4469-9a75-52d80531280c-kube-api-access-58kc5\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.150163 4728 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cb320e31-3c04-4469-9a75-52d80531280c-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.150173 4728 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cb320e31-3c04-4469-9a75-52d80531280c-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.150181 4728 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cb320e31-3c04-4469-9a75-52d80531280c-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.153128 4728 generic.go:334] "Generic (PLEG): container finished" podID="cb320e31-3c04-4469-9a75-52d80531280c" containerID="c21f3c10efc7d4b5d94d1ad593f5799b1933718b0f181b39473dd0b28e01d0b8" exitCode=0 Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.153163 4728 generic.go:334] "Generic (PLEG): container finished" podID="cb320e31-3c04-4469-9a75-52d80531280c" containerID="ba0e1550419d0ad8638d939916fc5268dafc091875642fb5deef4cc8aeea6155" exitCode=2 Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.153178 4728 generic.go:334] "Generic (PLEG): container finished" podID="cb320e31-3c04-4469-9a75-52d80531280c" containerID="6bdf1f8aa61fced34f79c921d6bf92d1d06ad01eb123b8fa4749006c5677bbf6" exitCode=0 Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.153187 4728 generic.go:334] "Generic (PLEG): container finished" podID="cb320e31-3c04-4469-9a75-52d80531280c" containerID="9440238024f25c719b0302e9a220ff640cc3848879c619428dd47599daed16e1" exitCode=0 Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.153233 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cb320e31-3c04-4469-9a75-52d80531280c","Type":"ContainerDied","Data":"c21f3c10efc7d4b5d94d1ad593f5799b1933718b0f181b39473dd0b28e01d0b8"} Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.153266 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cb320e31-3c04-4469-9a75-52d80531280c","Type":"ContainerDied","Data":"ba0e1550419d0ad8638d939916fc5268dafc091875642fb5deef4cc8aeea6155"} Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.153281 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cb320e31-3c04-4469-9a75-52d80531280c","Type":"ContainerDied","Data":"6bdf1f8aa61fced34f79c921d6bf92d1d06ad01eb123b8fa4749006c5677bbf6"} Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.153293 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cb320e31-3c04-4469-9a75-52d80531280c","Type":"ContainerDied","Data":"9440238024f25c719b0302e9a220ff640cc3848879c619428dd47599daed16e1"} Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 
11:31:10.153306 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cb320e31-3c04-4469-9a75-52d80531280c","Type":"ContainerDied","Data":"400354a3f91ef641ef023f842c32e3ac5dd97ca0b8074c54becb05794a3f3b59"} Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.153324 4728 scope.go:117] "RemoveContainer" containerID="c21f3c10efc7d4b5d94d1ad593f5799b1933718b0f181b39473dd0b28e01d0b8" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.153493 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.157438 4728 generic.go:334] "Generic (PLEG): container finished" podID="29402829-a7a8-4c70-b9d4-b0301b97ab76" containerID="21cd52edcf368b3b529b286788745601d93b95e413f147a661c2a85aade0ac18" exitCode=0 Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.157499 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-wqpdw" event={"ID":"29402829-a7a8-4c70-b9d4-b0301b97ab76","Type":"ContainerDied","Data":"21cd52edcf368b3b529b286788745601d93b95e413f147a661c2a85aade0ac18"} Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.170971 4728 generic.go:334] "Generic (PLEG): container finished" podID="ba6ad75b-eef0-4bd1-b008-0661eabd1bc4" containerID="35bc61f1051978215dfb8040ae75392ce02b65cbc988fe46eb74d477d2a26e82" exitCode=0 Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.171161 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-z59x7" event={"ID":"ba6ad75b-eef0-4bd1-b008-0661eabd1bc4","Type":"ContainerDied","Data":"35bc61f1051978215dfb8040ae75392ce02b65cbc988fe46eb74d477d2a26e82"} Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.177251 4728 generic.go:334] "Generic (PLEG): container finished" podID="cf847146-79d3-4259-9e19-f78f94b25dfa" containerID="369702e09105a37cc53baf80d353a3f6e3dc4beb40f464c71819a77c8deb8bb1" exitCode=0 Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.177313 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-vs4fm" event={"ID":"cf847146-79d3-4259-9e19-f78f94b25dfa","Type":"ContainerDied","Data":"369702e09105a37cc53baf80d353a3f6e3dc4beb40f464c71819a77c8deb8bb1"} Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.196357 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-scheduler-0"] Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.199712 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb320e31-3c04-4469-9a75-52d80531280c-config-data" (OuterVolumeSpecName: "config-data") pod "cb320e31-3c04-4469-9a75-52d80531280c" (UID: "cb320e31-3c04-4469-9a75-52d80531280c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.200851 4728 generic.go:334] "Generic (PLEG): container finished" podID="0c0925d7-2534-4aa8-8d4b-8dee7ab50f8d" containerID="1ae60a534d591c5e9f0548999c894388089c96a40e93a06ea8ae76d06f356fe2" exitCode=0 Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.201033 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-279f-account-create-update-mzbwz" event={"ID":"0c0925d7-2534-4aa8-8d4b-8dee7ab50f8d","Type":"ContainerDied","Data":"1ae60a534d591c5e9f0548999c894388089c96a40e93a06ea8ae76d06f356fe2"} Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.201060 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-279f-account-create-update-mzbwz" event={"ID":"0c0925d7-2534-4aa8-8d4b-8dee7ab50f8d","Type":"ContainerStarted","Data":"89e27901ab5cb94aaba863754da377b6e4b8ce1b726ea1c1563acf13c957d15b"} Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.203912 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb320e31-3c04-4469-9a75-52d80531280c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cb320e31-3c04-4469-9a75-52d80531280c" (UID: "cb320e31-3c04-4469-9a75-52d80531280c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.216831 4728 generic.go:334] "Generic (PLEG): container finished" podID="59f02d46-5d39-4908-bde8-f957fd7eb940" containerID="4d100edc95d8a92026fc013b9fd5ba0428c198c0950a3c471be595a72ec49716" exitCode=0 Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.216957 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-5bbb-account-create-update-2xrjf" event={"ID":"59f02d46-5d39-4908-bde8-f957fd7eb940","Type":"ContainerDied","Data":"4d100edc95d8a92026fc013b9fd5ba0428c198c0950a3c471be595a72ec49716"} Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.218599 4728 generic.go:334] "Generic (PLEG): container finished" podID="29d916ac-a71a-454f-bc05-39b8426b4e64" containerID="0db630bde519961cae74cf61e657588211f101ea3a922fd09ab35117aaacdc41" exitCode=0 Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.218641 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-1044-account-create-update-drt9p" event={"ID":"29d916ac-a71a-454f-bc05-39b8426b4e64","Type":"ContainerDied","Data":"0db630bde519961cae74cf61e657588211f101ea3a922fd09ab35117aaacdc41"} Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.218815 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-scheduler-0" podUID="a44b6cec-c64e-4a88-be5a-d51a1f6fb66e" containerName="manila-scheduler" containerID="cri-o://352cf0249dd32757a35c5baba19687dd187a3e006a069d650a8f9c120a7c10a1" gracePeriod=30 Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.218854 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-scheduler-0" podUID="a44b6cec-c64e-4a88-be5a-d51a1f6fb66e" containerName="probe" containerID="cri-o://51732741acde6111fb99709ccc2527d03d1e4908ed446e34ec9ff3ae39521dd7" gracePeriod=30 Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.251932 4728 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb320e31-3c04-4469-9a75-52d80531280c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 
11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.251971 4728 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb320e31-3c04-4469-9a75-52d80531280c-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.258164 4728 scope.go:117] "RemoveContainer" containerID="ba0e1550419d0ad8638d939916fc5268dafc091875642fb5deef4cc8aeea6155" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.286080 4728 scope.go:117] "RemoveContainer" containerID="6bdf1f8aa61fced34f79c921d6bf92d1d06ad01eb123b8fa4749006c5677bbf6" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.332126 4728 scope.go:117] "RemoveContainer" containerID="9440238024f25c719b0302e9a220ff640cc3848879c619428dd47599daed16e1" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.355906 4728 scope.go:117] "RemoveContainer" containerID="c21f3c10efc7d4b5d94d1ad593f5799b1933718b0f181b39473dd0b28e01d0b8" Dec 05 11:31:10 crc kubenswrapper[4728]: E1205 11:31:10.356319 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c21f3c10efc7d4b5d94d1ad593f5799b1933718b0f181b39473dd0b28e01d0b8\": container with ID starting with c21f3c10efc7d4b5d94d1ad593f5799b1933718b0f181b39473dd0b28e01d0b8 not found: ID does not exist" containerID="c21f3c10efc7d4b5d94d1ad593f5799b1933718b0f181b39473dd0b28e01d0b8" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.356349 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c21f3c10efc7d4b5d94d1ad593f5799b1933718b0f181b39473dd0b28e01d0b8"} err="failed to get container status \"c21f3c10efc7d4b5d94d1ad593f5799b1933718b0f181b39473dd0b28e01d0b8\": rpc error: code = NotFound desc = could not find container \"c21f3c10efc7d4b5d94d1ad593f5799b1933718b0f181b39473dd0b28e01d0b8\": container with ID starting with c21f3c10efc7d4b5d94d1ad593f5799b1933718b0f181b39473dd0b28e01d0b8 not found: ID does not exist" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.356370 4728 scope.go:117] "RemoveContainer" containerID="ba0e1550419d0ad8638d939916fc5268dafc091875642fb5deef4cc8aeea6155" Dec 05 11:31:10 crc kubenswrapper[4728]: E1205 11:31:10.357562 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ba0e1550419d0ad8638d939916fc5268dafc091875642fb5deef4cc8aeea6155\": container with ID starting with ba0e1550419d0ad8638d939916fc5268dafc091875642fb5deef4cc8aeea6155 not found: ID does not exist" containerID="ba0e1550419d0ad8638d939916fc5268dafc091875642fb5deef4cc8aeea6155" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.357630 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba0e1550419d0ad8638d939916fc5268dafc091875642fb5deef4cc8aeea6155"} err="failed to get container status \"ba0e1550419d0ad8638d939916fc5268dafc091875642fb5deef4cc8aeea6155\": rpc error: code = NotFound desc = could not find container \"ba0e1550419d0ad8638d939916fc5268dafc091875642fb5deef4cc8aeea6155\": container with ID starting with ba0e1550419d0ad8638d939916fc5268dafc091875642fb5deef4cc8aeea6155 not found: ID does not exist" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.357658 4728 scope.go:117] "RemoveContainer" containerID="6bdf1f8aa61fced34f79c921d6bf92d1d06ad01eb123b8fa4749006c5677bbf6" Dec 05 11:31:10 crc kubenswrapper[4728]: E1205 11:31:10.364059 4728 log.go:32] "ContainerStatus from runtime 
service failed" err="rpc error: code = NotFound desc = could not find container \"6bdf1f8aa61fced34f79c921d6bf92d1d06ad01eb123b8fa4749006c5677bbf6\": container with ID starting with 6bdf1f8aa61fced34f79c921d6bf92d1d06ad01eb123b8fa4749006c5677bbf6 not found: ID does not exist" containerID="6bdf1f8aa61fced34f79c921d6bf92d1d06ad01eb123b8fa4749006c5677bbf6" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.364098 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6bdf1f8aa61fced34f79c921d6bf92d1d06ad01eb123b8fa4749006c5677bbf6"} err="failed to get container status \"6bdf1f8aa61fced34f79c921d6bf92d1d06ad01eb123b8fa4749006c5677bbf6\": rpc error: code = NotFound desc = could not find container \"6bdf1f8aa61fced34f79c921d6bf92d1d06ad01eb123b8fa4749006c5677bbf6\": container with ID starting with 6bdf1f8aa61fced34f79c921d6bf92d1d06ad01eb123b8fa4749006c5677bbf6 not found: ID does not exist" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.364119 4728 scope.go:117] "RemoveContainer" containerID="9440238024f25c719b0302e9a220ff640cc3848879c619428dd47599daed16e1" Dec 05 11:31:10 crc kubenswrapper[4728]: E1205 11:31:10.364434 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9440238024f25c719b0302e9a220ff640cc3848879c619428dd47599daed16e1\": container with ID starting with 9440238024f25c719b0302e9a220ff640cc3848879c619428dd47599daed16e1 not found: ID does not exist" containerID="9440238024f25c719b0302e9a220ff640cc3848879c619428dd47599daed16e1" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.364479 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9440238024f25c719b0302e9a220ff640cc3848879c619428dd47599daed16e1"} err="failed to get container status \"9440238024f25c719b0302e9a220ff640cc3848879c619428dd47599daed16e1\": rpc error: code = NotFound desc = could not find container \"9440238024f25c719b0302e9a220ff640cc3848879c619428dd47599daed16e1\": container with ID starting with 9440238024f25c719b0302e9a220ff640cc3848879c619428dd47599daed16e1 not found: ID does not exist" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.364514 4728 scope.go:117] "RemoveContainer" containerID="c21f3c10efc7d4b5d94d1ad593f5799b1933718b0f181b39473dd0b28e01d0b8" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.364833 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c21f3c10efc7d4b5d94d1ad593f5799b1933718b0f181b39473dd0b28e01d0b8"} err="failed to get container status \"c21f3c10efc7d4b5d94d1ad593f5799b1933718b0f181b39473dd0b28e01d0b8\": rpc error: code = NotFound desc = could not find container \"c21f3c10efc7d4b5d94d1ad593f5799b1933718b0f181b39473dd0b28e01d0b8\": container with ID starting with c21f3c10efc7d4b5d94d1ad593f5799b1933718b0f181b39473dd0b28e01d0b8 not found: ID does not exist" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.364857 4728 scope.go:117] "RemoveContainer" containerID="ba0e1550419d0ad8638d939916fc5268dafc091875642fb5deef4cc8aeea6155" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.365589 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba0e1550419d0ad8638d939916fc5268dafc091875642fb5deef4cc8aeea6155"} err="failed to get container status \"ba0e1550419d0ad8638d939916fc5268dafc091875642fb5deef4cc8aeea6155\": rpc error: code = NotFound desc = could not find container 
\"ba0e1550419d0ad8638d939916fc5268dafc091875642fb5deef4cc8aeea6155\": container with ID starting with ba0e1550419d0ad8638d939916fc5268dafc091875642fb5deef4cc8aeea6155 not found: ID does not exist" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.365656 4728 scope.go:117] "RemoveContainer" containerID="6bdf1f8aa61fced34f79c921d6bf92d1d06ad01eb123b8fa4749006c5677bbf6" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.365976 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6bdf1f8aa61fced34f79c921d6bf92d1d06ad01eb123b8fa4749006c5677bbf6"} err="failed to get container status \"6bdf1f8aa61fced34f79c921d6bf92d1d06ad01eb123b8fa4749006c5677bbf6\": rpc error: code = NotFound desc = could not find container \"6bdf1f8aa61fced34f79c921d6bf92d1d06ad01eb123b8fa4749006c5677bbf6\": container with ID starting with 6bdf1f8aa61fced34f79c921d6bf92d1d06ad01eb123b8fa4749006c5677bbf6 not found: ID does not exist" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.366001 4728 scope.go:117] "RemoveContainer" containerID="9440238024f25c719b0302e9a220ff640cc3848879c619428dd47599daed16e1" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.366342 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9440238024f25c719b0302e9a220ff640cc3848879c619428dd47599daed16e1"} err="failed to get container status \"9440238024f25c719b0302e9a220ff640cc3848879c619428dd47599daed16e1\": rpc error: code = NotFound desc = could not find container \"9440238024f25c719b0302e9a220ff640cc3848879c619428dd47599daed16e1\": container with ID starting with 9440238024f25c719b0302e9a220ff640cc3848879c619428dd47599daed16e1 not found: ID does not exist" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.366363 4728 scope.go:117] "RemoveContainer" containerID="c21f3c10efc7d4b5d94d1ad593f5799b1933718b0f181b39473dd0b28e01d0b8" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.367864 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c21f3c10efc7d4b5d94d1ad593f5799b1933718b0f181b39473dd0b28e01d0b8"} err="failed to get container status \"c21f3c10efc7d4b5d94d1ad593f5799b1933718b0f181b39473dd0b28e01d0b8\": rpc error: code = NotFound desc = could not find container \"c21f3c10efc7d4b5d94d1ad593f5799b1933718b0f181b39473dd0b28e01d0b8\": container with ID starting with c21f3c10efc7d4b5d94d1ad593f5799b1933718b0f181b39473dd0b28e01d0b8 not found: ID does not exist" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.367892 4728 scope.go:117] "RemoveContainer" containerID="ba0e1550419d0ad8638d939916fc5268dafc091875642fb5deef4cc8aeea6155" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.368371 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba0e1550419d0ad8638d939916fc5268dafc091875642fb5deef4cc8aeea6155"} err="failed to get container status \"ba0e1550419d0ad8638d939916fc5268dafc091875642fb5deef4cc8aeea6155\": rpc error: code = NotFound desc = could not find container \"ba0e1550419d0ad8638d939916fc5268dafc091875642fb5deef4cc8aeea6155\": container with ID starting with ba0e1550419d0ad8638d939916fc5268dafc091875642fb5deef4cc8aeea6155 not found: ID does not exist" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.368395 4728 scope.go:117] "RemoveContainer" containerID="6bdf1f8aa61fced34f79c921d6bf92d1d06ad01eb123b8fa4749006c5677bbf6" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.368594 4728 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6bdf1f8aa61fced34f79c921d6bf92d1d06ad01eb123b8fa4749006c5677bbf6"} err="failed to get container status \"6bdf1f8aa61fced34f79c921d6bf92d1d06ad01eb123b8fa4749006c5677bbf6\": rpc error: code = NotFound desc = could not find container \"6bdf1f8aa61fced34f79c921d6bf92d1d06ad01eb123b8fa4749006c5677bbf6\": container with ID starting with 6bdf1f8aa61fced34f79c921d6bf92d1d06ad01eb123b8fa4749006c5677bbf6 not found: ID does not exist" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.368612 4728 scope.go:117] "RemoveContainer" containerID="9440238024f25c719b0302e9a220ff640cc3848879c619428dd47599daed16e1" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.369217 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9440238024f25c719b0302e9a220ff640cc3848879c619428dd47599daed16e1"} err="failed to get container status \"9440238024f25c719b0302e9a220ff640cc3848879c619428dd47599daed16e1\": rpc error: code = NotFound desc = could not find container \"9440238024f25c719b0302e9a220ff640cc3848879c619428dd47599daed16e1\": container with ID starting with 9440238024f25c719b0302e9a220ff640cc3848879c619428dd47599daed16e1 not found: ID does not exist" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.369240 4728 scope.go:117] "RemoveContainer" containerID="c21f3c10efc7d4b5d94d1ad593f5799b1933718b0f181b39473dd0b28e01d0b8" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.371583 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c21f3c10efc7d4b5d94d1ad593f5799b1933718b0f181b39473dd0b28e01d0b8"} err="failed to get container status \"c21f3c10efc7d4b5d94d1ad593f5799b1933718b0f181b39473dd0b28e01d0b8\": rpc error: code = NotFound desc = could not find container \"c21f3c10efc7d4b5d94d1ad593f5799b1933718b0f181b39473dd0b28e01d0b8\": container with ID starting with c21f3c10efc7d4b5d94d1ad593f5799b1933718b0f181b39473dd0b28e01d0b8 not found: ID does not exist" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.371625 4728 scope.go:117] "RemoveContainer" containerID="ba0e1550419d0ad8638d939916fc5268dafc091875642fb5deef4cc8aeea6155" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.372933 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba0e1550419d0ad8638d939916fc5268dafc091875642fb5deef4cc8aeea6155"} err="failed to get container status \"ba0e1550419d0ad8638d939916fc5268dafc091875642fb5deef4cc8aeea6155\": rpc error: code = NotFound desc = could not find container \"ba0e1550419d0ad8638d939916fc5268dafc091875642fb5deef4cc8aeea6155\": container with ID starting with ba0e1550419d0ad8638d939916fc5268dafc091875642fb5deef4cc8aeea6155 not found: ID does not exist" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.372955 4728 scope.go:117] "RemoveContainer" containerID="6bdf1f8aa61fced34f79c921d6bf92d1d06ad01eb123b8fa4749006c5677bbf6" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.373296 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6bdf1f8aa61fced34f79c921d6bf92d1d06ad01eb123b8fa4749006c5677bbf6"} err="failed to get container status \"6bdf1f8aa61fced34f79c921d6bf92d1d06ad01eb123b8fa4749006c5677bbf6\": rpc error: code = NotFound desc = could not find container \"6bdf1f8aa61fced34f79c921d6bf92d1d06ad01eb123b8fa4749006c5677bbf6\": container with ID starting with 
6bdf1f8aa61fced34f79c921d6bf92d1d06ad01eb123b8fa4749006c5677bbf6 not found: ID does not exist" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.373324 4728 scope.go:117] "RemoveContainer" containerID="9440238024f25c719b0302e9a220ff640cc3848879c619428dd47599daed16e1" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.373687 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9440238024f25c719b0302e9a220ff640cc3848879c619428dd47599daed16e1"} err="failed to get container status \"9440238024f25c719b0302e9a220ff640cc3848879c619428dd47599daed16e1\": rpc error: code = NotFound desc = could not find container \"9440238024f25c719b0302e9a220ff640cc3848879c619428dd47599daed16e1\": container with ID starting with 9440238024f25c719b0302e9a220ff640cc3848879c619428dd47599daed16e1 not found: ID does not exist" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.481172 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.489307 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.509183 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:31:10 crc kubenswrapper[4728]: E1205 11:31:10.511627 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb320e31-3c04-4469-9a75-52d80531280c" containerName="proxy-httpd" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.511849 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb320e31-3c04-4469-9a75-52d80531280c" containerName="proxy-httpd" Dec 05 11:31:10 crc kubenswrapper[4728]: E1205 11:31:10.511862 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb320e31-3c04-4469-9a75-52d80531280c" containerName="sg-core" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.511868 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb320e31-3c04-4469-9a75-52d80531280c" containerName="sg-core" Dec 05 11:31:10 crc kubenswrapper[4728]: E1205 11:31:10.511878 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb320e31-3c04-4469-9a75-52d80531280c" containerName="ceilometer-notification-agent" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.511884 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb320e31-3c04-4469-9a75-52d80531280c" containerName="ceilometer-notification-agent" Dec 05 11:31:10 crc kubenswrapper[4728]: E1205 11:31:10.512005 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb320e31-3c04-4469-9a75-52d80531280c" containerName="ceilometer-central-agent" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.512014 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb320e31-3c04-4469-9a75-52d80531280c" containerName="ceilometer-central-agent" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.512316 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-wqpdw" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.513083 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb320e31-3c04-4469-9a75-52d80531280c" containerName="ceilometer-central-agent" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.513106 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb320e31-3c04-4469-9a75-52d80531280c" containerName="ceilometer-notification-agent" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.513119 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb320e31-3c04-4469-9a75-52d80531280c" containerName="sg-core" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.513141 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb320e31-3c04-4469-9a75-52d80531280c" containerName="proxy-httpd" Dec 05 11:31:10 crc kubenswrapper[4728]: E1205 11:31:10.513407 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29402829-a7a8-4c70-b9d4-b0301b97ab76" containerName="mariadb-database-create" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.513420 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="29402829-a7a8-4c70-b9d4-b0301b97ab76" containerName="mariadb-database-create" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.513611 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="29402829-a7a8-4c70-b9d4-b0301b97ab76" containerName="mariadb-database-create" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.519905 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.522177 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.523345 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.540832 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.565779 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/29402829-a7a8-4c70-b9d4-b0301b97ab76-operator-scripts\") pod \"29402829-a7a8-4c70-b9d4-b0301b97ab76\" (UID: \"29402829-a7a8-4c70-b9d4-b0301b97ab76\") " Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.566010 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-55sbb\" (UniqueName: \"kubernetes.io/projected/29402829-a7a8-4c70-b9d4-b0301b97ab76-kube-api-access-55sbb\") pod \"29402829-a7a8-4c70-b9d4-b0301b97ab76\" (UID: \"29402829-a7a8-4c70-b9d4-b0301b97ab76\") " Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.566298 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fff7z\" (UniqueName: \"kubernetes.io/projected/fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4-kube-api-access-fff7z\") pod \"ceilometer-0\" (UID: \"fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4\") " pod="openstack/ceilometer-0" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.566344 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4-log-httpd\") pod 
\"ceilometer-0\" (UID: \"fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4\") " pod="openstack/ceilometer-0" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.566389 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4\") " pod="openstack/ceilometer-0" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.566419 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4-config-data\") pod \"ceilometer-0\" (UID: \"fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4\") " pod="openstack/ceilometer-0" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.566446 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4\") " pod="openstack/ceilometer-0" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.566468 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4-scripts\") pod \"ceilometer-0\" (UID: \"fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4\") " pod="openstack/ceilometer-0" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.566488 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4-run-httpd\") pod \"ceilometer-0\" (UID: \"fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4\") " pod="openstack/ceilometer-0" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.566583 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29402829-a7a8-4c70-b9d4-b0301b97ab76-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "29402829-a7a8-4c70-b9d4-b0301b97ab76" (UID: "29402829-a7a8-4c70-b9d4-b0301b97ab76"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.570438 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29402829-a7a8-4c70-b9d4-b0301b97ab76-kube-api-access-55sbb" (OuterVolumeSpecName: "kube-api-access-55sbb") pod "29402829-a7a8-4c70-b9d4-b0301b97ab76" (UID: "29402829-a7a8-4c70-b9d4-b0301b97ab76"). InnerVolumeSpecName "kube-api-access-55sbb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.668617 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4-scripts\") pod \"ceilometer-0\" (UID: \"fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4\") " pod="openstack/ceilometer-0" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.668682 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4-run-httpd\") pod \"ceilometer-0\" (UID: \"fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4\") " pod="openstack/ceilometer-0" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.668805 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fff7z\" (UniqueName: \"kubernetes.io/projected/fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4-kube-api-access-fff7z\") pod \"ceilometer-0\" (UID: \"fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4\") " pod="openstack/ceilometer-0" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.668854 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4-log-httpd\") pod \"ceilometer-0\" (UID: \"fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4\") " pod="openstack/ceilometer-0" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.668915 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4\") " pod="openstack/ceilometer-0" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.668954 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4-config-data\") pod \"ceilometer-0\" (UID: \"fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4\") " pod="openstack/ceilometer-0" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.668990 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4\") " pod="openstack/ceilometer-0" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.669049 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-55sbb\" (UniqueName: \"kubernetes.io/projected/29402829-a7a8-4c70-b9d4-b0301b97ab76-kube-api-access-55sbb\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.669062 4728 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/29402829-a7a8-4c70-b9d4-b0301b97ab76-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.669508 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4-log-httpd\") pod \"ceilometer-0\" (UID: \"fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4\") " pod="openstack/ceilometer-0" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.670545 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4-run-httpd\") pod \"ceilometer-0\" (UID: \"fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4\") " pod="openstack/ceilometer-0" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.675742 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4\") " pod="openstack/ceilometer-0" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.676465 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4\") " pod="openstack/ceilometer-0" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.676536 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4-scripts\") pod \"ceilometer-0\" (UID: \"fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4\") " pod="openstack/ceilometer-0" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.676916 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4-config-data\") pod \"ceilometer-0\" (UID: \"fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4\") " pod="openstack/ceilometer-0" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.688562 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fff7z\" (UniqueName: \"kubernetes.io/projected/fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4-kube-api-access-fff7z\") pod \"ceilometer-0\" (UID: \"fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4\") " pod="openstack/ceilometer-0" Dec 05 11:31:10 crc kubenswrapper[4728]: I1205 11:31:10.849252 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 11:31:11 crc kubenswrapper[4728]: I1205 11:31:11.053627 4728 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5c9776ccc5-xxm6x" podUID="296a9db8-738c-4a95-87ed-3037d5b6ddf5" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.168:5353: i/o timeout" Dec 05 11:31:11 crc kubenswrapper[4728]: I1205 11:31:11.231459 4728 generic.go:334] "Generic (PLEG): container finished" podID="a44b6cec-c64e-4a88-be5a-d51a1f6fb66e" containerID="51732741acde6111fb99709ccc2527d03d1e4908ed446e34ec9ff3ae39521dd7" exitCode=0 Dec 05 11:31:11 crc kubenswrapper[4728]: I1205 11:31:11.231507 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"a44b6cec-c64e-4a88-be5a-d51a1f6fb66e","Type":"ContainerDied","Data":"51732741acde6111fb99709ccc2527d03d1e4908ed446e34ec9ff3ae39521dd7"} Dec 05 11:31:11 crc kubenswrapper[4728]: I1205 11:31:11.234819 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-wqpdw" event={"ID":"29402829-a7a8-4c70-b9d4-b0301b97ab76","Type":"ContainerDied","Data":"5d9209d828c466ab39972bc8b6728b96ed5bc3b5515aabd96fb0951f54bd6448"} Dec 05 11:31:11 crc kubenswrapper[4728]: I1205 11:31:11.234853 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5d9209d828c466ab39972bc8b6728b96ed5bc3b5515aabd96fb0951f54bd6448" Dec 05 11:31:11 crc kubenswrapper[4728]: I1205 11:31:11.234994 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-wqpdw" Dec 05 11:31:11 crc kubenswrapper[4728]: I1205 11:31:11.301960 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:31:11 crc kubenswrapper[4728]: I1205 11:31:11.704948 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-5bbb-account-create-update-2xrjf" Dec 05 11:31:11 crc kubenswrapper[4728]: I1205 11:31:11.797779 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/59f02d46-5d39-4908-bde8-f957fd7eb940-operator-scripts\") pod \"59f02d46-5d39-4908-bde8-f957fd7eb940\" (UID: \"59f02d46-5d39-4908-bde8-f957fd7eb940\") " Dec 05 11:31:11 crc kubenswrapper[4728]: I1205 11:31:11.797931 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jphxn\" (UniqueName: \"kubernetes.io/projected/59f02d46-5d39-4908-bde8-f957fd7eb940-kube-api-access-jphxn\") pod \"59f02d46-5d39-4908-bde8-f957fd7eb940\" (UID: \"59f02d46-5d39-4908-bde8-f957fd7eb940\") " Dec 05 11:31:11 crc kubenswrapper[4728]: I1205 11:31:11.798647 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/59f02d46-5d39-4908-bde8-f957fd7eb940-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "59f02d46-5d39-4908-bde8-f957fd7eb940" (UID: "59f02d46-5d39-4908-bde8-f957fd7eb940"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:31:11 crc kubenswrapper[4728]: I1205 11:31:11.805036 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/59f02d46-5d39-4908-bde8-f957fd7eb940-kube-api-access-jphxn" (OuterVolumeSpecName: "kube-api-access-jphxn") pod "59f02d46-5d39-4908-bde8-f957fd7eb940" (UID: "59f02d46-5d39-4908-bde8-f957fd7eb940"). 
InnerVolumeSpecName "kube-api-access-jphxn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:31:11 crc kubenswrapper[4728]: I1205 11:31:11.902075 4728 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/59f02d46-5d39-4908-bde8-f957fd7eb940-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:11 crc kubenswrapper[4728]: I1205 11:31:11.902113 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jphxn\" (UniqueName: \"kubernetes.io/projected/59f02d46-5d39-4908-bde8-f957fd7eb940-kube-api-access-jphxn\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:12 crc kubenswrapper[4728]: I1205 11:31:12.058891 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-z59x7" Dec 05 11:31:12 crc kubenswrapper[4728]: I1205 11:31:12.063172 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-vs4fm" Dec 05 11:31:12 crc kubenswrapper[4728]: I1205 11:31:12.069576 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-1044-account-create-update-drt9p" Dec 05 11:31:12 crc kubenswrapper[4728]: I1205 11:31:12.075645 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-279f-account-create-update-mzbwz" Dec 05 11:31:12 crc kubenswrapper[4728]: I1205 11:31:12.105825 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4974j\" (UniqueName: \"kubernetes.io/projected/cf847146-79d3-4259-9e19-f78f94b25dfa-kube-api-access-4974j\") pod \"cf847146-79d3-4259-9e19-f78f94b25dfa\" (UID: \"cf847146-79d3-4259-9e19-f78f94b25dfa\") " Dec 05 11:31:12 crc kubenswrapper[4728]: I1205 11:31:12.106061 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ls4t9\" (UniqueName: \"kubernetes.io/projected/0c0925d7-2534-4aa8-8d4b-8dee7ab50f8d-kube-api-access-ls4t9\") pod \"0c0925d7-2534-4aa8-8d4b-8dee7ab50f8d\" (UID: \"0c0925d7-2534-4aa8-8d4b-8dee7ab50f8d\") " Dec 05 11:31:12 crc kubenswrapper[4728]: I1205 11:31:12.106172 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cf847146-79d3-4259-9e19-f78f94b25dfa-operator-scripts\") pod \"cf847146-79d3-4259-9e19-f78f94b25dfa\" (UID: \"cf847146-79d3-4259-9e19-f78f94b25dfa\") " Dec 05 11:31:12 crc kubenswrapper[4728]: I1205 11:31:12.106214 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0c0925d7-2534-4aa8-8d4b-8dee7ab50f8d-operator-scripts\") pod \"0c0925d7-2534-4aa8-8d4b-8dee7ab50f8d\" (UID: \"0c0925d7-2534-4aa8-8d4b-8dee7ab50f8d\") " Dec 05 11:31:12 crc kubenswrapper[4728]: I1205 11:31:12.106231 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gdqt7\" (UniqueName: \"kubernetes.io/projected/29d916ac-a71a-454f-bc05-39b8426b4e64-kube-api-access-gdqt7\") pod \"29d916ac-a71a-454f-bc05-39b8426b4e64\" (UID: \"29d916ac-a71a-454f-bc05-39b8426b4e64\") " Dec 05 11:31:12 crc kubenswrapper[4728]: I1205 11:31:12.106377 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/29d916ac-a71a-454f-bc05-39b8426b4e64-operator-scripts\") pod 
\"29d916ac-a71a-454f-bc05-39b8426b4e64\" (UID: \"29d916ac-a71a-454f-bc05-39b8426b4e64\") " Dec 05 11:31:12 crc kubenswrapper[4728]: I1205 11:31:12.106428 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ba6ad75b-eef0-4bd1-b008-0661eabd1bc4-operator-scripts\") pod \"ba6ad75b-eef0-4bd1-b008-0661eabd1bc4\" (UID: \"ba6ad75b-eef0-4bd1-b008-0661eabd1bc4\") " Dec 05 11:31:12 crc kubenswrapper[4728]: I1205 11:31:12.106508 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vmfq8\" (UniqueName: \"kubernetes.io/projected/ba6ad75b-eef0-4bd1-b008-0661eabd1bc4-kube-api-access-vmfq8\") pod \"ba6ad75b-eef0-4bd1-b008-0661eabd1bc4\" (UID: \"ba6ad75b-eef0-4bd1-b008-0661eabd1bc4\") " Dec 05 11:31:12 crc kubenswrapper[4728]: I1205 11:31:12.107375 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29d916ac-a71a-454f-bc05-39b8426b4e64-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "29d916ac-a71a-454f-bc05-39b8426b4e64" (UID: "29d916ac-a71a-454f-bc05-39b8426b4e64"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:31:12 crc kubenswrapper[4728]: I1205 11:31:12.108165 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0c0925d7-2534-4aa8-8d4b-8dee7ab50f8d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0c0925d7-2534-4aa8-8d4b-8dee7ab50f8d" (UID: "0c0925d7-2534-4aa8-8d4b-8dee7ab50f8d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:31:12 crc kubenswrapper[4728]: I1205 11:31:12.108588 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ba6ad75b-eef0-4bd1-b008-0661eabd1bc4-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ba6ad75b-eef0-4bd1-b008-0661eabd1bc4" (UID: "ba6ad75b-eef0-4bd1-b008-0661eabd1bc4"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:31:12 crc kubenswrapper[4728]: I1205 11:31:12.108777 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cf847146-79d3-4259-9e19-f78f94b25dfa-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "cf847146-79d3-4259-9e19-f78f94b25dfa" (UID: "cf847146-79d3-4259-9e19-f78f94b25dfa"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:31:12 crc kubenswrapper[4728]: I1205 11:31:12.111950 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf847146-79d3-4259-9e19-f78f94b25dfa-kube-api-access-4974j" (OuterVolumeSpecName: "kube-api-access-4974j") pod "cf847146-79d3-4259-9e19-f78f94b25dfa" (UID: "cf847146-79d3-4259-9e19-f78f94b25dfa"). InnerVolumeSpecName "kube-api-access-4974j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:31:12 crc kubenswrapper[4728]: I1205 11:31:12.112537 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29d916ac-a71a-454f-bc05-39b8426b4e64-kube-api-access-gdqt7" (OuterVolumeSpecName: "kube-api-access-gdqt7") pod "29d916ac-a71a-454f-bc05-39b8426b4e64" (UID: "29d916ac-a71a-454f-bc05-39b8426b4e64"). InnerVolumeSpecName "kube-api-access-gdqt7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:31:12 crc kubenswrapper[4728]: I1205 11:31:12.112561 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba6ad75b-eef0-4bd1-b008-0661eabd1bc4-kube-api-access-vmfq8" (OuterVolumeSpecName: "kube-api-access-vmfq8") pod "ba6ad75b-eef0-4bd1-b008-0661eabd1bc4" (UID: "ba6ad75b-eef0-4bd1-b008-0661eabd1bc4"). InnerVolumeSpecName "kube-api-access-vmfq8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:31:12 crc kubenswrapper[4728]: I1205 11:31:12.114045 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0c0925d7-2534-4aa8-8d4b-8dee7ab50f8d-kube-api-access-ls4t9" (OuterVolumeSpecName: "kube-api-access-ls4t9") pod "0c0925d7-2534-4aa8-8d4b-8dee7ab50f8d" (UID: "0c0925d7-2534-4aa8-8d4b-8dee7ab50f8d"). InnerVolumeSpecName "kube-api-access-ls4t9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:31:12 crc kubenswrapper[4728]: I1205 11:31:12.209315 4728 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cf847146-79d3-4259-9e19-f78f94b25dfa-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:12 crc kubenswrapper[4728]: I1205 11:31:12.209349 4728 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0c0925d7-2534-4aa8-8d4b-8dee7ab50f8d-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:12 crc kubenswrapper[4728]: I1205 11:31:12.209359 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gdqt7\" (UniqueName: \"kubernetes.io/projected/29d916ac-a71a-454f-bc05-39b8426b4e64-kube-api-access-gdqt7\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:12 crc kubenswrapper[4728]: I1205 11:31:12.209368 4728 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/29d916ac-a71a-454f-bc05-39b8426b4e64-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:12 crc kubenswrapper[4728]: I1205 11:31:12.209378 4728 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ba6ad75b-eef0-4bd1-b008-0661eabd1bc4-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:12 crc kubenswrapper[4728]: I1205 11:31:12.209386 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vmfq8\" (UniqueName: \"kubernetes.io/projected/ba6ad75b-eef0-4bd1-b008-0661eabd1bc4-kube-api-access-vmfq8\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:12 crc kubenswrapper[4728]: I1205 11:31:12.209395 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4974j\" (UniqueName: \"kubernetes.io/projected/cf847146-79d3-4259-9e19-f78f94b25dfa-kube-api-access-4974j\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:12 crc kubenswrapper[4728]: I1205 11:31:12.209405 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ls4t9\" (UniqueName: \"kubernetes.io/projected/0c0925d7-2534-4aa8-8d4b-8dee7ab50f8d-kube-api-access-ls4t9\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:12 crc kubenswrapper[4728]: I1205 11:31:12.246340 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-5bbb-account-create-update-2xrjf" Dec 05 11:31:12 crc kubenswrapper[4728]: I1205 11:31:12.246308 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-5bbb-account-create-update-2xrjf" event={"ID":"59f02d46-5d39-4908-bde8-f957fd7eb940","Type":"ContainerDied","Data":"f785e9b1925df594818081e7f9faac277ccf63988e2559cdb71874dccaa3a961"} Dec 05 11:31:12 crc kubenswrapper[4728]: I1205 11:31:12.246453 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f785e9b1925df594818081e7f9faac277ccf63988e2559cdb71874dccaa3a961" Dec 05 11:31:12 crc kubenswrapper[4728]: I1205 11:31:12.248262 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4","Type":"ContainerStarted","Data":"b70102ffab4dce7294e826c463c09a7d390427f77a609d7382462ee0cb88f791"} Dec 05 11:31:12 crc kubenswrapper[4728]: I1205 11:31:12.248306 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4","Type":"ContainerStarted","Data":"1d4d1bdcf3d0e2de4f1fe96606f65ed75ea01bf777674b1f79e8380c01220aec"} Dec 05 11:31:12 crc kubenswrapper[4728]: I1205 11:31:12.250261 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-1044-account-create-update-drt9p" event={"ID":"29d916ac-a71a-454f-bc05-39b8426b4e64","Type":"ContainerDied","Data":"d452f8f9d10f3946cd34f15ee9f281fa71c7dd3b23614bd6362ced8cac5d5562"} Dec 05 11:31:12 crc kubenswrapper[4728]: I1205 11:31:12.250292 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d452f8f9d10f3946cd34f15ee9f281fa71c7dd3b23614bd6362ced8cac5d5562" Dec 05 11:31:12 crc kubenswrapper[4728]: I1205 11:31:12.250349 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-1044-account-create-update-drt9p" Dec 05 11:31:12 crc kubenswrapper[4728]: I1205 11:31:12.252524 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-z59x7" event={"ID":"ba6ad75b-eef0-4bd1-b008-0661eabd1bc4","Type":"ContainerDied","Data":"2c928cf4b635914173e5bb2619fe60ff3885c366f6b01c555b7f1b52cda455cb"} Dec 05 11:31:12 crc kubenswrapper[4728]: I1205 11:31:12.252555 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-z59x7" Dec 05 11:31:12 crc kubenswrapper[4728]: I1205 11:31:12.252575 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2c928cf4b635914173e5bb2619fe60ff3885c366f6b01c555b7f1b52cda455cb" Dec 05 11:31:12 crc kubenswrapper[4728]: I1205 11:31:12.255333 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-vs4fm" event={"ID":"cf847146-79d3-4259-9e19-f78f94b25dfa","Type":"ContainerDied","Data":"be136d96d1d270a6b65e9424be1a065502e57205d2285c481f32eef65148d7a6"} Dec 05 11:31:12 crc kubenswrapper[4728]: I1205 11:31:12.255358 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-vs4fm" Dec 05 11:31:12 crc kubenswrapper[4728]: I1205 11:31:12.255370 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="be136d96d1d270a6b65e9424be1a065502e57205d2285c481f32eef65148d7a6" Dec 05 11:31:12 crc kubenswrapper[4728]: I1205 11:31:12.258563 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-279f-account-create-update-mzbwz" event={"ID":"0c0925d7-2534-4aa8-8d4b-8dee7ab50f8d","Type":"ContainerDied","Data":"89e27901ab5cb94aaba863754da377b6e4b8ce1b726ea1c1563acf13c957d15b"} Dec 05 11:31:12 crc kubenswrapper[4728]: I1205 11:31:12.258583 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-279f-account-create-update-mzbwz" Dec 05 11:31:12 crc kubenswrapper[4728]: I1205 11:31:12.258604 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="89e27901ab5cb94aaba863754da377b6e4b8ce1b726ea1c1563acf13c957d15b" Dec 05 11:31:12 crc kubenswrapper[4728]: I1205 11:31:12.283047 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 11:31:12 crc kubenswrapper[4728]: I1205 11:31:12.283617 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="a20bde59-1d28-4f65-8dc4-26808e4abb01" containerName="glance-log" containerID="cri-o://0f5a311865e19dd3d5b1a5165e08426e494a1b78c0c70701da5f3874882d590f" gracePeriod=30 Dec 05 11:31:12 crc kubenswrapper[4728]: I1205 11:31:12.283879 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="a20bde59-1d28-4f65-8dc4-26808e4abb01" containerName="glance-httpd" containerID="cri-o://7a16f56434384282593b37212dee2501ad8a8b225f9aa57b98c86c81934e68e2" gracePeriod=30 Dec 05 11:31:12 crc kubenswrapper[4728]: I1205 11:31:12.362990 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cb320e31-3c04-4469-9a75-52d80531280c" path="/var/lib/kubelet/pods/cb320e31-3c04-4469-9a75-52d80531280c/volumes" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.062391 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-scheduler-0" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.089005 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-9kplx"] Dec 05 11:31:13 crc kubenswrapper[4728]: E1205 11:31:13.089373 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29d916ac-a71a-454f-bc05-39b8426b4e64" containerName="mariadb-account-create-update" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.089389 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="29d916ac-a71a-454f-bc05-39b8426b4e64" containerName="mariadb-account-create-update" Dec 05 11:31:13 crc kubenswrapper[4728]: E1205 11:31:13.089404 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf847146-79d3-4259-9e19-f78f94b25dfa" containerName="mariadb-database-create" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.089412 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf847146-79d3-4259-9e19-f78f94b25dfa" containerName="mariadb-database-create" Dec 05 11:31:13 crc kubenswrapper[4728]: E1205 11:31:13.089426 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a44b6cec-c64e-4a88-be5a-d51a1f6fb66e" containerName="probe" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.089432 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="a44b6cec-c64e-4a88-be5a-d51a1f6fb66e" containerName="probe" Dec 05 11:31:13 crc kubenswrapper[4728]: E1205 11:31:13.089449 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="59f02d46-5d39-4908-bde8-f957fd7eb940" containerName="mariadb-account-create-update" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.089455 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="59f02d46-5d39-4908-bde8-f957fd7eb940" containerName="mariadb-account-create-update" Dec 05 11:31:13 crc kubenswrapper[4728]: E1205 11:31:13.089474 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba6ad75b-eef0-4bd1-b008-0661eabd1bc4" containerName="mariadb-database-create" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.089479 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba6ad75b-eef0-4bd1-b008-0661eabd1bc4" containerName="mariadb-database-create" Dec 05 11:31:13 crc kubenswrapper[4728]: E1205 11:31:13.089491 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c0925d7-2534-4aa8-8d4b-8dee7ab50f8d" containerName="mariadb-account-create-update" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.089497 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c0925d7-2534-4aa8-8d4b-8dee7ab50f8d" containerName="mariadb-account-create-update" Dec 05 11:31:13 crc kubenswrapper[4728]: E1205 11:31:13.089512 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a44b6cec-c64e-4a88-be5a-d51a1f6fb66e" containerName="manila-scheduler" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.089517 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="a44b6cec-c64e-4a88-be5a-d51a1f6fb66e" containerName="manila-scheduler" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.089678 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="29d916ac-a71a-454f-bc05-39b8426b4e64" containerName="mariadb-account-create-update" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.089693 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf847146-79d3-4259-9e19-f78f94b25dfa" containerName="mariadb-database-create" Dec 05 11:31:13 crc 
kubenswrapper[4728]: I1205 11:31:13.089702 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba6ad75b-eef0-4bd1-b008-0661eabd1bc4" containerName="mariadb-database-create" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.089715 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c0925d7-2534-4aa8-8d4b-8dee7ab50f8d" containerName="mariadb-account-create-update" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.089723 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="a44b6cec-c64e-4a88-be5a-d51a1f6fb66e" containerName="probe" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.089736 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="59f02d46-5d39-4908-bde8-f957fd7eb940" containerName="mariadb-account-create-update" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.089746 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="a44b6cec-c64e-4a88-be5a-d51a1f6fb66e" containerName="manila-scheduler" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.090357 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-9kplx" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.093013 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.093397 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-7xjfv" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.093545 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.125738 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a44b6cec-c64e-4a88-be5a-d51a1f6fb66e-config-data-custom\") pod \"a44b6cec-c64e-4a88-be5a-d51a1f6fb66e\" (UID: \"a44b6cec-c64e-4a88-be5a-d51a1f6fb66e\") " Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.125847 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a44b6cec-c64e-4a88-be5a-d51a1f6fb66e-etc-machine-id\") pod \"a44b6cec-c64e-4a88-be5a-d51a1f6fb66e\" (UID: \"a44b6cec-c64e-4a88-be5a-d51a1f6fb66e\") " Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.125888 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a44b6cec-c64e-4a88-be5a-d51a1f6fb66e-combined-ca-bundle\") pod \"a44b6cec-c64e-4a88-be5a-d51a1f6fb66e\" (UID: \"a44b6cec-c64e-4a88-be5a-d51a1f6fb66e\") " Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.125996 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a44b6cec-c64e-4a88-be5a-d51a1f6fb66e-config-data\") pod \"a44b6cec-c64e-4a88-be5a-d51a1f6fb66e\" (UID: \"a44b6cec-c64e-4a88-be5a-d51a1f6fb66e\") " Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.126021 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a44b6cec-c64e-4a88-be5a-d51a1f6fb66e-scripts\") pod \"a44b6cec-c64e-4a88-be5a-d51a1f6fb66e\" (UID: \"a44b6cec-c64e-4a88-be5a-d51a1f6fb66e\") " Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.126061 4728 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rq2tt\" (UniqueName: \"kubernetes.io/projected/a44b6cec-c64e-4a88-be5a-d51a1f6fb66e-kube-api-access-rq2tt\") pod \"a44b6cec-c64e-4a88-be5a-d51a1f6fb66e\" (UID: \"a44b6cec-c64e-4a88-be5a-d51a1f6fb66e\") " Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.126380 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/520798b6-b938-403f-adc6-5609f6bcfd72-scripts\") pod \"nova-cell0-conductor-db-sync-9kplx\" (UID: \"520798b6-b938-403f-adc6-5609f6bcfd72\") " pod="openstack/nova-cell0-conductor-db-sync-9kplx" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.126418 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j7qqk\" (UniqueName: \"kubernetes.io/projected/520798b6-b938-403f-adc6-5609f6bcfd72-kube-api-access-j7qqk\") pod \"nova-cell0-conductor-db-sync-9kplx\" (UID: \"520798b6-b938-403f-adc6-5609f6bcfd72\") " pod="openstack/nova-cell0-conductor-db-sync-9kplx" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.126446 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/520798b6-b938-403f-adc6-5609f6bcfd72-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-9kplx\" (UID: \"520798b6-b938-403f-adc6-5609f6bcfd72\") " pod="openstack/nova-cell0-conductor-db-sync-9kplx" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.126470 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/520798b6-b938-403f-adc6-5609f6bcfd72-config-data\") pod \"nova-cell0-conductor-db-sync-9kplx\" (UID: \"520798b6-b938-403f-adc6-5609f6bcfd72\") " pod="openstack/nova-cell0-conductor-db-sync-9kplx" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.133068 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a44b6cec-c64e-4a88-be5a-d51a1f6fb66e-kube-api-access-rq2tt" (OuterVolumeSpecName: "kube-api-access-rq2tt") pod "a44b6cec-c64e-4a88-be5a-d51a1f6fb66e" (UID: "a44b6cec-c64e-4a88-be5a-d51a1f6fb66e"). InnerVolumeSpecName "kube-api-access-rq2tt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.133169 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a44b6cec-c64e-4a88-be5a-d51a1f6fb66e-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "a44b6cec-c64e-4a88-be5a-d51a1f6fb66e" (UID: "a44b6cec-c64e-4a88-be5a-d51a1f6fb66e"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.137026 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a44b6cec-c64e-4a88-be5a-d51a1f6fb66e-scripts" (OuterVolumeSpecName: "scripts") pod "a44b6cec-c64e-4a88-be5a-d51a1f6fb66e" (UID: "a44b6cec-c64e-4a88-be5a-d51a1f6fb66e"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.157963 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a44b6cec-c64e-4a88-be5a-d51a1f6fb66e-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "a44b6cec-c64e-4a88-be5a-d51a1f6fb66e" (UID: "a44b6cec-c64e-4a88-be5a-d51a1f6fb66e"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.204039 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-9kplx"] Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.228161 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/520798b6-b938-403f-adc6-5609f6bcfd72-scripts\") pod \"nova-cell0-conductor-db-sync-9kplx\" (UID: \"520798b6-b938-403f-adc6-5609f6bcfd72\") " pod="openstack/nova-cell0-conductor-db-sync-9kplx" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.228218 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j7qqk\" (UniqueName: \"kubernetes.io/projected/520798b6-b938-403f-adc6-5609f6bcfd72-kube-api-access-j7qqk\") pod \"nova-cell0-conductor-db-sync-9kplx\" (UID: \"520798b6-b938-403f-adc6-5609f6bcfd72\") " pod="openstack/nova-cell0-conductor-db-sync-9kplx" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.228251 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/520798b6-b938-403f-adc6-5609f6bcfd72-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-9kplx\" (UID: \"520798b6-b938-403f-adc6-5609f6bcfd72\") " pod="openstack/nova-cell0-conductor-db-sync-9kplx" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.228268 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/520798b6-b938-403f-adc6-5609f6bcfd72-config-data\") pod \"nova-cell0-conductor-db-sync-9kplx\" (UID: \"520798b6-b938-403f-adc6-5609f6bcfd72\") " pod="openstack/nova-cell0-conductor-db-sync-9kplx" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.228459 4728 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a44b6cec-c64e-4a88-be5a-d51a1f6fb66e-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.228475 4728 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a44b6cec-c64e-4a88-be5a-d51a1f6fb66e-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.228484 4728 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a44b6cec-c64e-4a88-be5a-d51a1f6fb66e-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.228493 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rq2tt\" (UniqueName: \"kubernetes.io/projected/a44b6cec-c64e-4a88-be5a-d51a1f6fb66e-kube-api-access-rq2tt\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.234558 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/520798b6-b938-403f-adc6-5609f6bcfd72-scripts\") pod \"nova-cell0-conductor-db-sync-9kplx\" (UID: \"520798b6-b938-403f-adc6-5609f6bcfd72\") " pod="openstack/nova-cell0-conductor-db-sync-9kplx" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.234699 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/520798b6-b938-403f-adc6-5609f6bcfd72-config-data\") pod \"nova-cell0-conductor-db-sync-9kplx\" (UID: \"520798b6-b938-403f-adc6-5609f6bcfd72\") " pod="openstack/nova-cell0-conductor-db-sync-9kplx" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.234873 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a44b6cec-c64e-4a88-be5a-d51a1f6fb66e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a44b6cec-c64e-4a88-be5a-d51a1f6fb66e" (UID: "a44b6cec-c64e-4a88-be5a-d51a1f6fb66e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.237342 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/520798b6-b938-403f-adc6-5609f6bcfd72-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-9kplx\" (UID: \"520798b6-b938-403f-adc6-5609f6bcfd72\") " pod="openstack/nova-cell0-conductor-db-sync-9kplx" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.259596 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j7qqk\" (UniqueName: \"kubernetes.io/projected/520798b6-b938-403f-adc6-5609f6bcfd72-kube-api-access-j7qqk\") pod \"nova-cell0-conductor-db-sync-9kplx\" (UID: \"520798b6-b938-403f-adc6-5609f6bcfd72\") " pod="openstack/nova-cell0-conductor-db-sync-9kplx" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.286715 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.288043 4728 generic.go:334] "Generic (PLEG): container finished" podID="a20bde59-1d28-4f65-8dc4-26808e4abb01" containerID="0f5a311865e19dd3d5b1a5165e08426e494a1b78c0c70701da5f3874882d590f" exitCode=143 Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.288272 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a20bde59-1d28-4f65-8dc4-26808e4abb01","Type":"ContainerDied","Data":"0f5a311865e19dd3d5b1a5165e08426e494a1b78c0c70701da5f3874882d590f"} Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.290826 4728 generic.go:334] "Generic (PLEG): container finished" podID="a44b6cec-c64e-4a88-be5a-d51a1f6fb66e" containerID="352cf0249dd32757a35c5baba19687dd187a3e006a069d650a8f9c120a7c10a1" exitCode=0 Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.290880 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"a44b6cec-c64e-4a88-be5a-d51a1f6fb66e","Type":"ContainerDied","Data":"352cf0249dd32757a35c5baba19687dd187a3e006a069d650a8f9c120a7c10a1"} Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.290973 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-scheduler-0" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.290990 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"a44b6cec-c64e-4a88-be5a-d51a1f6fb66e","Type":"ContainerDied","Data":"bf182e459ea8db90cc795cccd8321636fa604bb00828db49a940f3b1cd9d23e5"} Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.291012 4728 scope.go:117] "RemoveContainer" containerID="51732741acde6111fb99709ccc2527d03d1e4908ed446e34ec9ff3ae39521dd7" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.311117 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4","Type":"ContainerStarted","Data":"e86d26f2350c7bbf480d99fb9a2a9d65291aafe748d5346cd96a951540756cf9"} Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.318028 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a44b6cec-c64e-4a88-be5a-d51a1f6fb66e-config-data" (OuterVolumeSpecName: "config-data") pod "a44b6cec-c64e-4a88-be5a-d51a1f6fb66e" (UID: "a44b6cec-c64e-4a88-be5a-d51a1f6fb66e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.320017 4728 scope.go:117] "RemoveContainer" containerID="352cf0249dd32757a35c5baba19687dd187a3e006a069d650a8f9c120a7c10a1" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.332925 4728 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a44b6cec-c64e-4a88-be5a-d51a1f6fb66e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.332952 4728 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a44b6cec-c64e-4a88-be5a-d51a1f6fb66e-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.344501 4728 scope.go:117] "RemoveContainer" containerID="51732741acde6111fb99709ccc2527d03d1e4908ed446e34ec9ff3ae39521dd7" Dec 05 11:31:13 crc kubenswrapper[4728]: E1205 11:31:13.344883 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"51732741acde6111fb99709ccc2527d03d1e4908ed446e34ec9ff3ae39521dd7\": container with ID starting with 51732741acde6111fb99709ccc2527d03d1e4908ed446e34ec9ff3ae39521dd7 not found: ID does not exist" containerID="51732741acde6111fb99709ccc2527d03d1e4908ed446e34ec9ff3ae39521dd7" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.344912 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"51732741acde6111fb99709ccc2527d03d1e4908ed446e34ec9ff3ae39521dd7"} err="failed to get container status \"51732741acde6111fb99709ccc2527d03d1e4908ed446e34ec9ff3ae39521dd7\": rpc error: code = NotFound desc = could not find container \"51732741acde6111fb99709ccc2527d03d1e4908ed446e34ec9ff3ae39521dd7\": container with ID starting with 51732741acde6111fb99709ccc2527d03d1e4908ed446e34ec9ff3ae39521dd7 not found: ID does not exist" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.344932 4728 scope.go:117] "RemoveContainer" containerID="352cf0249dd32757a35c5baba19687dd187a3e006a069d650a8f9c120a7c10a1" Dec 05 11:31:13 crc kubenswrapper[4728]: E1205 11:31:13.345104 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound 
desc = could not find container \"352cf0249dd32757a35c5baba19687dd187a3e006a069d650a8f9c120a7c10a1\": container with ID starting with 352cf0249dd32757a35c5baba19687dd187a3e006a069d650a8f9c120a7c10a1 not found: ID does not exist" containerID="352cf0249dd32757a35c5baba19687dd187a3e006a069d650a8f9c120a7c10a1" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.345126 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"352cf0249dd32757a35c5baba19687dd187a3e006a069d650a8f9c120a7c10a1"} err="failed to get container status \"352cf0249dd32757a35c5baba19687dd187a3e006a069d650a8f9c120a7c10a1\": rpc error: code = NotFound desc = could not find container \"352cf0249dd32757a35c5baba19687dd187a3e006a069d650a8f9c120a7c10a1\": container with ID starting with 352cf0249dd32757a35c5baba19687dd187a3e006a069d650a8f9c120a7c10a1 not found: ID does not exist" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.412266 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-9kplx" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.693856 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-scheduler-0"] Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.715894 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-scheduler-0"] Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.742038 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-scheduler-0"] Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.744320 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-scheduler-0" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.747537 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scheduler-config-data" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.755478 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.848245 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j6pzg\" (UniqueName: \"kubernetes.io/projected/30fe7fc4-13df-437a-8771-c6904804bcb9-kube-api-access-j6pzg\") pod \"manila-scheduler-0\" (UID: \"30fe7fc4-13df-437a-8771-c6904804bcb9\") " pod="openstack/manila-scheduler-0" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.848303 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/30fe7fc4-13df-437a-8771-c6904804bcb9-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"30fe7fc4-13df-437a-8771-c6904804bcb9\") " pod="openstack/manila-scheduler-0" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.848347 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/30fe7fc4-13df-437a-8771-c6904804bcb9-scripts\") pod \"manila-scheduler-0\" (UID: \"30fe7fc4-13df-437a-8771-c6904804bcb9\") " pod="openstack/manila-scheduler-0" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.848467 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30fe7fc4-13df-437a-8771-c6904804bcb9-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: 
\"30fe7fc4-13df-437a-8771-c6904804bcb9\") " pod="openstack/manila-scheduler-0" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.848655 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30fe7fc4-13df-437a-8771-c6904804bcb9-config-data\") pod \"manila-scheduler-0\" (UID: \"30fe7fc4-13df-437a-8771-c6904804bcb9\") " pod="openstack/manila-scheduler-0" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.848689 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/30fe7fc4-13df-437a-8771-c6904804bcb9-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"30fe7fc4-13df-437a-8771-c6904804bcb9\") " pod="openstack/manila-scheduler-0" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.895184 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-9kplx"] Dec 05 11:31:13 crc kubenswrapper[4728]: W1205 11:31:13.897639 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod520798b6_b938_403f_adc6_5609f6bcfd72.slice/crio-c7d80b84363f0e2ccc4137fb0d45a22cea21f52584f0ac6185f7684bdbde64e7 WatchSource:0}: Error finding container c7d80b84363f0e2ccc4137fb0d45a22cea21f52584f0ac6185f7684bdbde64e7: Status 404 returned error can't find the container with id c7d80b84363f0e2ccc4137fb0d45a22cea21f52584f0ac6185f7684bdbde64e7 Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.952534 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30fe7fc4-13df-437a-8771-c6904804bcb9-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"30fe7fc4-13df-437a-8771-c6904804bcb9\") " pod="openstack/manila-scheduler-0" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.952683 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30fe7fc4-13df-437a-8771-c6904804bcb9-config-data\") pod \"manila-scheduler-0\" (UID: \"30fe7fc4-13df-437a-8771-c6904804bcb9\") " pod="openstack/manila-scheduler-0" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.952709 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/30fe7fc4-13df-437a-8771-c6904804bcb9-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"30fe7fc4-13df-437a-8771-c6904804bcb9\") " pod="openstack/manila-scheduler-0" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.952757 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j6pzg\" (UniqueName: \"kubernetes.io/projected/30fe7fc4-13df-437a-8771-c6904804bcb9-kube-api-access-j6pzg\") pod \"manila-scheduler-0\" (UID: \"30fe7fc4-13df-437a-8771-c6904804bcb9\") " pod="openstack/manila-scheduler-0" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.952779 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/30fe7fc4-13df-437a-8771-c6904804bcb9-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"30fe7fc4-13df-437a-8771-c6904804bcb9\") " pod="openstack/manila-scheduler-0" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.952825 4728 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/30fe7fc4-13df-437a-8771-c6904804bcb9-scripts\") pod \"manila-scheduler-0\" (UID: \"30fe7fc4-13df-437a-8771-c6904804bcb9\") " pod="openstack/manila-scheduler-0" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.953064 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/30fe7fc4-13df-437a-8771-c6904804bcb9-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"30fe7fc4-13df-437a-8771-c6904804bcb9\") " pod="openstack/manila-scheduler-0" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.957740 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/30fe7fc4-13df-437a-8771-c6904804bcb9-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"30fe7fc4-13df-437a-8771-c6904804bcb9\") " pod="openstack/manila-scheduler-0" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.958435 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30fe7fc4-13df-437a-8771-c6904804bcb9-config-data\") pod \"manila-scheduler-0\" (UID: \"30fe7fc4-13df-437a-8771-c6904804bcb9\") " pod="openstack/manila-scheduler-0" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.959738 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/30fe7fc4-13df-437a-8771-c6904804bcb9-scripts\") pod \"manila-scheduler-0\" (UID: \"30fe7fc4-13df-437a-8771-c6904804bcb9\") " pod="openstack/manila-scheduler-0" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.962298 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30fe7fc4-13df-437a-8771-c6904804bcb9-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"30fe7fc4-13df-437a-8771-c6904804bcb9\") " pod="openstack/manila-scheduler-0" Dec 05 11:31:13 crc kubenswrapper[4728]: I1205 11:31:13.970046 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j6pzg\" (UniqueName: \"kubernetes.io/projected/30fe7fc4-13df-437a-8771-c6904804bcb9-kube-api-access-j6pzg\") pod \"manila-scheduler-0\" (UID: \"30fe7fc4-13df-437a-8771-c6904804bcb9\") " pod="openstack/manila-scheduler-0" Dec 05 11:31:14 crc kubenswrapper[4728]: I1205 11:31:14.060820 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-scheduler-0" Dec 05 11:31:14 crc kubenswrapper[4728]: I1205 11:31:14.331297 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-9kplx" event={"ID":"520798b6-b938-403f-adc6-5609f6bcfd72","Type":"ContainerStarted","Data":"c7d80b84363f0e2ccc4137fb0d45a22cea21f52584f0ac6185f7684bdbde64e7"} Dec 05 11:31:14 crc kubenswrapper[4728]: I1205 11:31:14.389726 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a44b6cec-c64e-4a88-be5a-d51a1f6fb66e" path="/var/lib/kubelet/pods/a44b6cec-c64e-4a88-be5a-d51a1f6fb66e/volumes" Dec 05 11:31:14 crc kubenswrapper[4728]: I1205 11:31:14.390740 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4","Type":"ContainerStarted","Data":"5f43bd438bbb8786c5507c7316849a550745aad047611b7c016353512bc5656b"} Dec 05 11:31:14 crc kubenswrapper[4728]: I1205 11:31:14.534798 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Dec 05 11:31:14 crc kubenswrapper[4728]: I1205 11:31:14.988183 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/manila-api-0" Dec 05 11:31:15 crc kubenswrapper[4728]: I1205 11:31:15.316202 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 11:31:15 crc kubenswrapper[4728]: I1205 11:31:15.317096 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="c8ca2efe-4d75-46f8-9878-e4f76b8e85f4" containerName="glance-httpd" containerID="cri-o://4355c7f8f8c64082fee96e7eaee0da584c497118fd2025378398d3ccddf09e3d" gracePeriod=30 Dec 05 11:31:15 crc kubenswrapper[4728]: I1205 11:31:15.319107 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="c8ca2efe-4d75-46f8-9878-e4f76b8e85f4" containerName="glance-log" containerID="cri-o://20438e594ebc0eb329f8e3f19d1a4a98d23077a99329c007cc5b01d1469da530" gracePeriod=30 Dec 05 11:31:15 crc kubenswrapper[4728]: I1205 11:31:15.426013 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4","Type":"ContainerStarted","Data":"0eed620867e6e3a4ff05aa21581749e2f394624d0d1537c0e67914d0cd809089"} Dec 05 11:31:15 crc kubenswrapper[4728]: I1205 11:31:15.426179 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4" containerName="ceilometer-central-agent" containerID="cri-o://b70102ffab4dce7294e826c463c09a7d390427f77a609d7382462ee0cb88f791" gracePeriod=30 Dec 05 11:31:15 crc kubenswrapper[4728]: I1205 11:31:15.426433 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 11:31:15 crc kubenswrapper[4728]: I1205 11:31:15.426663 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4" containerName="proxy-httpd" containerID="cri-o://0eed620867e6e3a4ff05aa21581749e2f394624d0d1537c0e67914d0cd809089" gracePeriod=30 Dec 05 11:31:15 crc kubenswrapper[4728]: I1205 11:31:15.426706 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4" containerName="sg-core" 
containerID="cri-o://5f43bd438bbb8786c5507c7316849a550745aad047611b7c016353512bc5656b" gracePeriod=30 Dec 05 11:31:15 crc kubenswrapper[4728]: I1205 11:31:15.426739 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4" containerName="ceilometer-notification-agent" containerID="cri-o://e86d26f2350c7bbf480d99fb9a2a9d65291aafe748d5346cd96a951540756cf9" gracePeriod=30 Dec 05 11:31:15 crc kubenswrapper[4728]: I1205 11:31:15.437899 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"30fe7fc4-13df-437a-8771-c6904804bcb9","Type":"ContainerStarted","Data":"5db1aef650d0a600de0f17998fc2f1720c18f3dfa62f41a9faebab3459b7948a"} Dec 05 11:31:15 crc kubenswrapper[4728]: I1205 11:31:15.437939 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"30fe7fc4-13df-437a-8771-c6904804bcb9","Type":"ContainerStarted","Data":"fb311fd4b144bc7f1efe84c349d96a668a7e28c95957a32f3ddecacdba014bae"} Dec 05 11:31:15 crc kubenswrapper[4728]: I1205 11:31:15.457174 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.334710054 podStartE2EDuration="5.457160689s" podCreationTimestamp="2025-12-05 11:31:10 +0000 UTC" firstStartedPulling="2025-12-05 11:31:11.332620675 +0000 UTC m=+1405.474743368" lastFinishedPulling="2025-12-05 11:31:14.45507131 +0000 UTC m=+1408.597194003" observedRunningTime="2025-12-05 11:31:15.456520942 +0000 UTC m=+1409.598643655" watchObservedRunningTime="2025-12-05 11:31:15.457160689 +0000 UTC m=+1409.599283382" Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.100418 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.298080 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a20bde59-1d28-4f65-8dc4-26808e4abb01-config-data\") pod \"a20bde59-1d28-4f65-8dc4-26808e4abb01\" (UID: \"a20bde59-1d28-4f65-8dc4-26808e4abb01\") " Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.298636 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a20bde59-1d28-4f65-8dc4-26808e4abb01-public-tls-certs\") pod \"a20bde59-1d28-4f65-8dc4-26808e4abb01\" (UID: \"a20bde59-1d28-4f65-8dc4-26808e4abb01\") " Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.298680 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a20bde59-1d28-4f65-8dc4-26808e4abb01-scripts\") pod \"a20bde59-1d28-4f65-8dc4-26808e4abb01\" (UID: \"a20bde59-1d28-4f65-8dc4-26808e4abb01\") " Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.299511 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/a20bde59-1d28-4f65-8dc4-26808e4abb01-ceph\") pod \"a20bde59-1d28-4f65-8dc4-26808e4abb01\" (UID: \"a20bde59-1d28-4f65-8dc4-26808e4abb01\") " Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.299586 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a20bde59-1d28-4f65-8dc4-26808e4abb01-httpd-run\") pod \"a20bde59-1d28-4f65-8dc4-26808e4abb01\" (UID: \"a20bde59-1d28-4f65-8dc4-26808e4abb01\") " Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.299627 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a20bde59-1d28-4f65-8dc4-26808e4abb01-logs\") pod \"a20bde59-1d28-4f65-8dc4-26808e4abb01\" (UID: \"a20bde59-1d28-4f65-8dc4-26808e4abb01\") " Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.299678 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zsfwr\" (UniqueName: \"kubernetes.io/projected/a20bde59-1d28-4f65-8dc4-26808e4abb01-kube-api-access-zsfwr\") pod \"a20bde59-1d28-4f65-8dc4-26808e4abb01\" (UID: \"a20bde59-1d28-4f65-8dc4-26808e4abb01\") " Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.299850 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"a20bde59-1d28-4f65-8dc4-26808e4abb01\" (UID: \"a20bde59-1d28-4f65-8dc4-26808e4abb01\") " Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.299911 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a20bde59-1d28-4f65-8dc4-26808e4abb01-combined-ca-bundle\") pod \"a20bde59-1d28-4f65-8dc4-26808e4abb01\" (UID: \"a20bde59-1d28-4f65-8dc4-26808e4abb01\") " Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.300608 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a20bde59-1d28-4f65-8dc4-26808e4abb01-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "a20bde59-1d28-4f65-8dc4-26808e4abb01" (UID: "a20bde59-1d28-4f65-8dc4-26808e4abb01"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.301378 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a20bde59-1d28-4f65-8dc4-26808e4abb01-logs" (OuterVolumeSpecName: "logs") pod "a20bde59-1d28-4f65-8dc4-26808e4abb01" (UID: "a20bde59-1d28-4f65-8dc4-26808e4abb01"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.301439 4728 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a20bde59-1d28-4f65-8dc4-26808e4abb01-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.307933 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a20bde59-1d28-4f65-8dc4-26808e4abb01-ceph" (OuterVolumeSpecName: "ceph") pod "a20bde59-1d28-4f65-8dc4-26808e4abb01" (UID: "a20bde59-1d28-4f65-8dc4-26808e4abb01"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.308389 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a20bde59-1d28-4f65-8dc4-26808e4abb01-scripts" (OuterVolumeSpecName: "scripts") pod "a20bde59-1d28-4f65-8dc4-26808e4abb01" (UID: "a20bde59-1d28-4f65-8dc4-26808e4abb01"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.311214 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a20bde59-1d28-4f65-8dc4-26808e4abb01-kube-api-access-zsfwr" (OuterVolumeSpecName: "kube-api-access-zsfwr") pod "a20bde59-1d28-4f65-8dc4-26808e4abb01" (UID: "a20bde59-1d28-4f65-8dc4-26808e4abb01"). InnerVolumeSpecName "kube-api-access-zsfwr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.311502 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "glance") pod "a20bde59-1d28-4f65-8dc4-26808e4abb01" (UID: "a20bde59-1d28-4f65-8dc4-26808e4abb01"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.345986 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a20bde59-1d28-4f65-8dc4-26808e4abb01-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a20bde59-1d28-4f65-8dc4-26808e4abb01" (UID: "a20bde59-1d28-4f65-8dc4-26808e4abb01"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.387043 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a20bde59-1d28-4f65-8dc4-26808e4abb01-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "a20bde59-1d28-4f65-8dc4-26808e4abb01" (UID: "a20bde59-1d28-4f65-8dc4-26808e4abb01"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.407203 4728 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a20bde59-1d28-4f65-8dc4-26808e4abb01-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.407235 4728 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a20bde59-1d28-4f65-8dc4-26808e4abb01-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.407245 4728 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/a20bde59-1d28-4f65-8dc4-26808e4abb01-ceph\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.407252 4728 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a20bde59-1d28-4f65-8dc4-26808e4abb01-logs\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.407262 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zsfwr\" (UniqueName: \"kubernetes.io/projected/a20bde59-1d28-4f65-8dc4-26808e4abb01-kube-api-access-zsfwr\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.407282 4728 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.407291 4728 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a20bde59-1d28-4f65-8dc4-26808e4abb01-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.424312 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a20bde59-1d28-4f65-8dc4-26808e4abb01-config-data" (OuterVolumeSpecName: "config-data") pod "a20bde59-1d28-4f65-8dc4-26808e4abb01" (UID: "a20bde59-1d28-4f65-8dc4-26808e4abb01"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.462018 4728 generic.go:334] "Generic (PLEG): container finished" podID="a20bde59-1d28-4f65-8dc4-26808e4abb01" containerID="7a16f56434384282593b37212dee2501ad8a8b225f9aa57b98c86c81934e68e2" exitCode=0 Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.462094 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a20bde59-1d28-4f65-8dc4-26808e4abb01","Type":"ContainerDied","Data":"7a16f56434384282593b37212dee2501ad8a8b225f9aa57b98c86c81934e68e2"} Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.462125 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a20bde59-1d28-4f65-8dc4-26808e4abb01","Type":"ContainerDied","Data":"7b8b61a6d8415eac85f9109329faf2d430993d276ffe8cf4c875dd6c7a250d3b"} Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.462151 4728 scope.go:117] "RemoveContainer" containerID="7a16f56434384282593b37212dee2501ad8a8b225f9aa57b98c86c81934e68e2" Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.462312 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.472665 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"30fe7fc4-13df-437a-8771-c6904804bcb9","Type":"ContainerStarted","Data":"c06a64f000320ba1a0b3bfeecccf19b959c2b9bf63c2a60ff90bcd92340913a7"} Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.485557 4728 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.499026 4728 generic.go:334] "Generic (PLEG): container finished" podID="fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4" containerID="0eed620867e6e3a4ff05aa21581749e2f394624d0d1537c0e67914d0cd809089" exitCode=0 Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.499066 4728 generic.go:334] "Generic (PLEG): container finished" podID="fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4" containerID="5f43bd438bbb8786c5507c7316849a550745aad047611b7c016353512bc5656b" exitCode=2 Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.499075 4728 generic.go:334] "Generic (PLEG): container finished" podID="fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4" containerID="e86d26f2350c7bbf480d99fb9a2a9d65291aafe748d5346cd96a951540756cf9" exitCode=0 Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.499083 4728 generic.go:334] "Generic (PLEG): container finished" podID="fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4" containerID="b70102ffab4dce7294e826c463c09a7d390427f77a609d7382462ee0cb88f791" exitCode=0 Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.499141 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4","Type":"ContainerDied","Data":"0eed620867e6e3a4ff05aa21581749e2f394624d0d1537c0e67914d0cd809089"} Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.499166 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4","Type":"ContainerDied","Data":"5f43bd438bbb8786c5507c7316849a550745aad047611b7c016353512bc5656b"} Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.499181 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4","Type":"ContainerDied","Data":"e86d26f2350c7bbf480d99fb9a2a9d65291aafe748d5346cd96a951540756cf9"} Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.499203 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4","Type":"ContainerDied","Data":"b70102ffab4dce7294e826c463c09a7d390427f77a609d7382462ee0cb88f791"} Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.503494 4728 generic.go:334] "Generic (PLEG): container finished" podID="c8ca2efe-4d75-46f8-9878-e4f76b8e85f4" containerID="20438e594ebc0eb329f8e3f19d1a4a98d23077a99329c007cc5b01d1469da530" exitCode=143 Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.503551 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c8ca2efe-4d75-46f8-9878-e4f76b8e85f4","Type":"ContainerDied","Data":"20438e594ebc0eb329f8e3f19d1a4a98d23077a99329c007cc5b01d1469da530"} Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.509376 4728 scope.go:117] "RemoveContainer" 
containerID="0f5a311865e19dd3d5b1a5165e08426e494a1b78c0c70701da5f3874882d590f" Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.513422 4728 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.513443 4728 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a20bde59-1d28-4f65-8dc4-26808e4abb01-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.552062 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-scheduler-0" podStartSLOduration=3.552040325 podStartE2EDuration="3.552040325s" podCreationTimestamp="2025-12-05 11:31:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:31:16.496552399 +0000 UTC m=+1410.638675092" watchObservedRunningTime="2025-12-05 11:31:16.552040325 +0000 UTC m=+1410.694163018" Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.552412 4728 scope.go:117] "RemoveContainer" containerID="7a16f56434384282593b37212dee2501ad8a8b225f9aa57b98c86c81934e68e2" Dec 05 11:31:16 crc kubenswrapper[4728]: E1205 11:31:16.556268 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7a16f56434384282593b37212dee2501ad8a8b225f9aa57b98c86c81934e68e2\": container with ID starting with 7a16f56434384282593b37212dee2501ad8a8b225f9aa57b98c86c81934e68e2 not found: ID does not exist" containerID="7a16f56434384282593b37212dee2501ad8a8b225f9aa57b98c86c81934e68e2" Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.556299 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7a16f56434384282593b37212dee2501ad8a8b225f9aa57b98c86c81934e68e2"} err="failed to get container status \"7a16f56434384282593b37212dee2501ad8a8b225f9aa57b98c86c81934e68e2\": rpc error: code = NotFound desc = could not find container \"7a16f56434384282593b37212dee2501ad8a8b225f9aa57b98c86c81934e68e2\": container with ID starting with 7a16f56434384282593b37212dee2501ad8a8b225f9aa57b98c86c81934e68e2 not found: ID does not exist" Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.556320 4728 scope.go:117] "RemoveContainer" containerID="0f5a311865e19dd3d5b1a5165e08426e494a1b78c0c70701da5f3874882d590f" Dec 05 11:31:16 crc kubenswrapper[4728]: E1205 11:31:16.557962 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0f5a311865e19dd3d5b1a5165e08426e494a1b78c0c70701da5f3874882d590f\": container with ID starting with 0f5a311865e19dd3d5b1a5165e08426e494a1b78c0c70701da5f3874882d590f not found: ID does not exist" containerID="0f5a311865e19dd3d5b1a5165e08426e494a1b78c0c70701da5f3874882d590f" Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.557994 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f5a311865e19dd3d5b1a5165e08426e494a1b78c0c70701da5f3874882d590f"} err="failed to get container status \"0f5a311865e19dd3d5b1a5165e08426e494a1b78c0c70701da5f3874882d590f\": rpc error: code = NotFound desc = could not find container \"0f5a311865e19dd3d5b1a5165e08426e494a1b78c0c70701da5f3874882d590f\": container with ID starting with 
0f5a311865e19dd3d5b1a5165e08426e494a1b78c0c70701da5f3874882d590f not found: ID does not exist"
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.564050 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.574670 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.588393 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 05 11:31:16 crc kubenswrapper[4728]: E1205 11:31:16.588828 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a20bde59-1d28-4f65-8dc4-26808e4abb01" containerName="glance-httpd"
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.588908 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="a20bde59-1d28-4f65-8dc4-26808e4abb01" containerName="glance-httpd"
Dec 05 11:31:16 crc kubenswrapper[4728]: E1205 11:31:16.589007 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a20bde59-1d28-4f65-8dc4-26808e4abb01" containerName="glance-log"
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.589065 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="a20bde59-1d28-4f65-8dc4-26808e4abb01" containerName="glance-log"
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.589616 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="a20bde59-1d28-4f65-8dc4-26808e4abb01" containerName="glance-httpd"
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.590327 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="a20bde59-1d28-4f65-8dc4-26808e4abb01" containerName="glance-log"
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.591342 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.595225 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc"
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.595239 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data"
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.609856 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.612774 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.718264 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4-combined-ca-bundle\") pod \"fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4\" (UID: \"fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4\") "
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.718322 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4-sg-core-conf-yaml\") pod \"fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4\" (UID: \"fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4\") "
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.718428 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4-config-data\") pod \"fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4\" (UID: \"fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4\") "
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.718524 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4-scripts\") pod \"fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4\" (UID: \"fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4\") "
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.718583 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4-log-httpd\") pod \"fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4\" (UID: \"fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4\") "
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.718618 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fff7z\" (UniqueName: \"kubernetes.io/projected/fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4-kube-api-access-fff7z\") pod \"fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4\" (UID: \"fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4\") "
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.718645 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4-run-httpd\") pod \"fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4\" (UID: \"fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4\") "
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.718988 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bhb8x\" (UniqueName: \"kubernetes.io/projected/32cf3773-bc1a-4c62-9b1a-8fc95e42e403-kube-api-access-bhb8x\") pod \"glance-default-external-api-0\" (UID: \"32cf3773-bc1a-4c62-9b1a-8fc95e42e403\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.719072 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32cf3773-bc1a-4c62-9b1a-8fc95e42e403-config-data\") pod \"glance-default-external-api-0\" (UID: \"32cf3773-bc1a-4c62-9b1a-8fc95e42e403\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.719115 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/32cf3773-bc1a-4c62-9b1a-8fc95e42e403-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"32cf3773-bc1a-4c62-9b1a-8fc95e42e403\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.719457 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4" (UID: "fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.719770 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"32cf3773-bc1a-4c62-9b1a-8fc95e42e403\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.719825 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4" (UID: "fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.719962 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32cf3773-bc1a-4c62-9b1a-8fc95e42e403-scripts\") pod \"glance-default-external-api-0\" (UID: \"32cf3773-bc1a-4c62-9b1a-8fc95e42e403\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.720064 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/32cf3773-bc1a-4c62-9b1a-8fc95e42e403-ceph\") pod \"glance-default-external-api-0\" (UID: \"32cf3773-bc1a-4c62-9b1a-8fc95e42e403\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.720087 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/32cf3773-bc1a-4c62-9b1a-8fc95e42e403-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"32cf3773-bc1a-4c62-9b1a-8fc95e42e403\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.720114 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32cf3773-bc1a-4c62-9b1a-8fc95e42e403-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"32cf3773-bc1a-4c62-9b1a-8fc95e42e403\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.720295 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/32cf3773-bc1a-4c62-9b1a-8fc95e42e403-logs\") pod \"glance-default-external-api-0\" (UID: \"32cf3773-bc1a-4c62-9b1a-8fc95e42e403\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.720405 4728 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4-log-httpd\") on node \"crc\" DevicePath \"\""
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.720416 4728 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4-run-httpd\") on node \"crc\" DevicePath \"\""
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.726072 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4-scripts" (OuterVolumeSpecName: "scripts") pod "fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4" (UID: "fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.736892 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4-kube-api-access-fff7z" (OuterVolumeSpecName: "kube-api-access-fff7z") pod "fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4" (UID: "fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4"). InnerVolumeSpecName "kube-api-access-fff7z". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.770579 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4" (UID: "fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.811304 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4" (UID: "fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.823785 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bhb8x\" (UniqueName: \"kubernetes.io/projected/32cf3773-bc1a-4c62-9b1a-8fc95e42e403-kube-api-access-bhb8x\") pod \"glance-default-external-api-0\" (UID: \"32cf3773-bc1a-4c62-9b1a-8fc95e42e403\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.823868 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32cf3773-bc1a-4c62-9b1a-8fc95e42e403-config-data\") pod \"glance-default-external-api-0\" (UID: \"32cf3773-bc1a-4c62-9b1a-8fc95e42e403\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.823893 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/32cf3773-bc1a-4c62-9b1a-8fc95e42e403-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"32cf3773-bc1a-4c62-9b1a-8fc95e42e403\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.823937 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"32cf3773-bc1a-4c62-9b1a-8fc95e42e403\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.823997 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32cf3773-bc1a-4c62-9b1a-8fc95e42e403-scripts\") pod \"glance-default-external-api-0\" (UID: \"32cf3773-bc1a-4c62-9b1a-8fc95e42e403\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.824024 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/32cf3773-bc1a-4c62-9b1a-8fc95e42e403-ceph\") pod \"glance-default-external-api-0\" (UID: \"32cf3773-bc1a-4c62-9b1a-8fc95e42e403\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.824039 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/32cf3773-bc1a-4c62-9b1a-8fc95e42e403-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"32cf3773-bc1a-4c62-9b1a-8fc95e42e403\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.824057 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32cf3773-bc1a-4c62-9b1a-8fc95e42e403-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"32cf3773-bc1a-4c62-9b1a-8fc95e42e403\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.824105 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/32cf3773-bc1a-4c62-9b1a-8fc95e42e403-logs\") pod \"glance-default-external-api-0\" (UID: \"32cf3773-bc1a-4c62-9b1a-8fc95e42e403\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.824152 4728 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.824165 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fff7z\" (UniqueName: \"kubernetes.io/projected/fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4-kube-api-access-fff7z\") on node \"crc\" DevicePath \"\""
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.824175 4728 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.824186 4728 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.824573 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/32cf3773-bc1a-4c62-9b1a-8fc95e42e403-logs\") pod \"glance-default-external-api-0\" (UID: \"32cf3773-bc1a-4c62-9b1a-8fc95e42e403\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.826760 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/32cf3773-bc1a-4c62-9b1a-8fc95e42e403-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"32cf3773-bc1a-4c62-9b1a-8fc95e42e403\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.829495 4728 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"32cf3773-bc1a-4c62-9b1a-8fc95e42e403\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-external-api-0"
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.831464 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32cf3773-bc1a-4c62-9b1a-8fc95e42e403-scripts\") pod \"glance-default-external-api-0\" (UID: \"32cf3773-bc1a-4c62-9b1a-8fc95e42e403\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.833057 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32cf3773-bc1a-4c62-9b1a-8fc95e42e403-config-data\") pod \"glance-default-external-api-0\" (UID: \"32cf3773-bc1a-4c62-9b1a-8fc95e42e403\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.838911 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/32cf3773-bc1a-4c62-9b1a-8fc95e42e403-ceph\") pod \"glance-default-external-api-0\" (UID: \"32cf3773-bc1a-4c62-9b1a-8fc95e42e403\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.843953 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bhb8x\" (UniqueName: \"kubernetes.io/projected/32cf3773-bc1a-4c62-9b1a-8fc95e42e403-kube-api-access-bhb8x\") pod \"glance-default-external-api-0\" (UID: \"32cf3773-bc1a-4c62-9b1a-8fc95e42e403\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.847421 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32cf3773-bc1a-4c62-9b1a-8fc95e42e403-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"32cf3773-bc1a-4c62-9b1a-8fc95e42e403\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.847665 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/32cf3773-bc1a-4c62-9b1a-8fc95e42e403-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"32cf3773-bc1a-4c62-9b1a-8fc95e42e403\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.866375 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"32cf3773-bc1a-4c62-9b1a-8fc95e42e403\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.894328 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4-config-data" (OuterVolumeSpecName: "config-data") pod "fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4" (UID: "fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.925196 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Dec 05 11:31:16 crc kubenswrapper[4728]: I1205 11:31:16.925870 4728 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 11:31:17 crc kubenswrapper[4728]: I1205 11:31:17.518593 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 11:31:17 crc kubenswrapper[4728]: I1205 11:31:17.518588 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4","Type":"ContainerDied","Data":"1d4d1bdcf3d0e2de4f1fe96606f65ed75ea01bf777674b1f79e8380c01220aec"}
Dec 05 11:31:17 crc kubenswrapper[4728]: I1205 11:31:17.519549 4728 scope.go:117] "RemoveContainer" containerID="0eed620867e6e3a4ff05aa21581749e2f394624d0d1537c0e67914d0cd809089"
Dec 05 11:31:17 crc kubenswrapper[4728]: I1205 11:31:17.563249 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 05 11:31:17 crc kubenswrapper[4728]: I1205 11:31:17.567073 4728 scope.go:117] "RemoveContainer" containerID="5f43bd438bbb8786c5507c7316849a550745aad047611b7c016353512bc5656b"
Dec 05 11:31:17 crc kubenswrapper[4728]: I1205 11:31:17.573300 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 11:31:17 crc kubenswrapper[4728]: I1205 11:31:17.581690 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 11:31:17 crc kubenswrapper[4728]: I1205 11:31:17.605031 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Dec 05 11:31:17 crc kubenswrapper[4728]: E1205 11:31:17.605447 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4" containerName="ceilometer-central-agent"
Dec 05 11:31:17 crc kubenswrapper[4728]: I1205 11:31:17.605472 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4" containerName="ceilometer-central-agent"
Dec 05 11:31:17 crc kubenswrapper[4728]: E1205 11:31:17.605486 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4" containerName="proxy-httpd"
Dec 05 11:31:17 crc kubenswrapper[4728]: I1205 11:31:17.605493 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4" containerName="proxy-httpd"
Dec 05 11:31:17 crc kubenswrapper[4728]: E1205 11:31:17.605524 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4" containerName="ceilometer-notification-agent"
Dec 05 11:31:17 crc kubenswrapper[4728]: I1205 11:31:17.605530 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4" containerName="ceilometer-notification-agent"
Dec 05 11:31:17 crc kubenswrapper[4728]: E1205 11:31:17.605541 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4" containerName="sg-core"
Dec 05 11:31:17 crc kubenswrapper[4728]: I1205 11:31:17.605547 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4" containerName="sg-core"
Dec 05 11:31:17 crc kubenswrapper[4728]: I1205 11:31:17.605704 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4" containerName="sg-core"
Dec 05 11:31:17 crc kubenswrapper[4728]: I1205 11:31:17.605723 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4" containerName="proxy-httpd"
Dec 05 11:31:17 crc kubenswrapper[4728]: I1205 11:31:17.605744 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4" containerName="ceilometer-notification-agent"
Dec 05 11:31:17 crc kubenswrapper[4728]: I1205 11:31:17.605756 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4" containerName="ceilometer-central-agent"
Dec 05 11:31:17 crc kubenswrapper[4728]: I1205 11:31:17.607414 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 11:31:17 crc kubenswrapper[4728]: I1205 11:31:17.611285 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Dec 05 11:31:17 crc kubenswrapper[4728]: I1205 11:31:17.611454 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Dec 05 11:31:17 crc kubenswrapper[4728]: I1205 11:31:17.616089 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 11:31:17 crc kubenswrapper[4728]: I1205 11:31:17.643068 4728 scope.go:117] "RemoveContainer" containerID="e86d26f2350c7bbf480d99fb9a2a9d65291aafe748d5346cd96a951540756cf9"
Dec 05 11:31:17 crc kubenswrapper[4728]: I1205 11:31:17.695851 4728 scope.go:117] "RemoveContainer" containerID="b70102ffab4dce7294e826c463c09a7d390427f77a609d7382462ee0cb88f791"
Dec 05 11:31:17 crc kubenswrapper[4728]: I1205 11:31:17.742522 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5679ebbf-dd32-4faa-b195-b7f01365edff-run-httpd\") pod \"ceilometer-0\" (UID: \"5679ebbf-dd32-4faa-b195-b7f01365edff\") " pod="openstack/ceilometer-0"
Dec 05 11:31:17 crc kubenswrapper[4728]: I1205 11:31:17.742615 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5679ebbf-dd32-4faa-b195-b7f01365edff-config-data\") pod \"ceilometer-0\" (UID: \"5679ebbf-dd32-4faa-b195-b7f01365edff\") " pod="openstack/ceilometer-0"
Dec 05 11:31:17 crc kubenswrapper[4728]: I1205 11:31:17.742647 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7mttx\" (UniqueName: \"kubernetes.io/projected/5679ebbf-dd32-4faa-b195-b7f01365edff-kube-api-access-7mttx\") pod \"ceilometer-0\" (UID: \"5679ebbf-dd32-4faa-b195-b7f01365edff\") " pod="openstack/ceilometer-0"
Dec 05 11:31:17 crc kubenswrapper[4728]: I1205 11:31:17.742711 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5679ebbf-dd32-4faa-b195-b7f01365edff-scripts\") pod \"ceilometer-0\" (UID: \"5679ebbf-dd32-4faa-b195-b7f01365edff\") " pod="openstack/ceilometer-0"
Dec 05 11:31:17 crc kubenswrapper[4728]: I1205 11:31:17.742869 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5679ebbf-dd32-4faa-b195-b7f01365edff-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5679ebbf-dd32-4faa-b195-b7f01365edff\") " pod="openstack/ceilometer-0"
Dec 05 11:31:17 crc kubenswrapper[4728]: I1205 11:31:17.742904 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5679ebbf-dd32-4faa-b195-b7f01365edff-log-httpd\") pod \"ceilometer-0\" (UID: \"5679ebbf-dd32-4faa-b195-b7f01365edff\") " pod="openstack/ceilometer-0"
Dec 05 11:31:17 crc kubenswrapper[4728]: I1205 11:31:17.743008 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5679ebbf-dd32-4faa-b195-b7f01365edff-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5679ebbf-dd32-4faa-b195-b7f01365edff\") " pod="openstack/ceilometer-0"
Dec 05 11:31:17 crc kubenswrapper[4728]: I1205 11:31:17.844637 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5679ebbf-dd32-4faa-b195-b7f01365edff-run-httpd\") pod \"ceilometer-0\" (UID: \"5679ebbf-dd32-4faa-b195-b7f01365edff\") " pod="openstack/ceilometer-0"
Dec 05 11:31:17 crc kubenswrapper[4728]: I1205 11:31:17.844691 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5679ebbf-dd32-4faa-b195-b7f01365edff-config-data\") pod \"ceilometer-0\" (UID: \"5679ebbf-dd32-4faa-b195-b7f01365edff\") " pod="openstack/ceilometer-0"
Dec 05 11:31:17 crc kubenswrapper[4728]: I1205 11:31:17.844714 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7mttx\" (UniqueName: \"kubernetes.io/projected/5679ebbf-dd32-4faa-b195-b7f01365edff-kube-api-access-7mttx\") pod \"ceilometer-0\" (UID: \"5679ebbf-dd32-4faa-b195-b7f01365edff\") " pod="openstack/ceilometer-0"
Dec 05 11:31:17 crc kubenswrapper[4728]: I1205 11:31:17.844740 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5679ebbf-dd32-4faa-b195-b7f01365edff-scripts\") pod \"ceilometer-0\" (UID: \"5679ebbf-dd32-4faa-b195-b7f01365edff\") " pod="openstack/ceilometer-0"
Dec 05 11:31:17 crc kubenswrapper[4728]: I1205 11:31:17.844801 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5679ebbf-dd32-4faa-b195-b7f01365edff-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5679ebbf-dd32-4faa-b195-b7f01365edff\") " pod="openstack/ceilometer-0"
Dec 05 11:31:17 crc kubenswrapper[4728]: I1205 11:31:17.844829 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5679ebbf-dd32-4faa-b195-b7f01365edff-log-httpd\") pod \"ceilometer-0\" (UID: \"5679ebbf-dd32-4faa-b195-b7f01365edff\") " pod="openstack/ceilometer-0"
Dec 05 11:31:17 crc kubenswrapper[4728]: I1205 11:31:17.844883 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5679ebbf-dd32-4faa-b195-b7f01365edff-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5679ebbf-dd32-4faa-b195-b7f01365edff\") " pod="openstack/ceilometer-0"
Dec 05 11:31:17 crc kubenswrapper[4728]: I1205 11:31:17.845280 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5679ebbf-dd32-4faa-b195-b7f01365edff-run-httpd\") pod \"ceilometer-0\" (UID: \"5679ebbf-dd32-4faa-b195-b7f01365edff\") " pod="openstack/ceilometer-0"
Dec 05 11:31:17 crc kubenswrapper[4728]: I1205 11:31:17.845384 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5679ebbf-dd32-4faa-b195-b7f01365edff-log-httpd\") pod \"ceilometer-0\" (UID: \"5679ebbf-dd32-4faa-b195-b7f01365edff\") " pod="openstack/ceilometer-0"
Dec 05 11:31:17 crc kubenswrapper[4728]: I1205 11:31:17.852018 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5679ebbf-dd32-4faa-b195-b7f01365edff-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5679ebbf-dd32-4faa-b195-b7f01365edff\") " pod="openstack/ceilometer-0"
Dec 05 11:31:17 crc kubenswrapper[4728]: I1205 11:31:17.853213 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5679ebbf-dd32-4faa-b195-b7f01365edff-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5679ebbf-dd32-4faa-b195-b7f01365edff\") " pod="openstack/ceilometer-0"
Dec 05 11:31:17 crc kubenswrapper[4728]: I1205 11:31:17.861773 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5679ebbf-dd32-4faa-b195-b7f01365edff-scripts\") pod \"ceilometer-0\" (UID: \"5679ebbf-dd32-4faa-b195-b7f01365edff\") " pod="openstack/ceilometer-0"
Dec 05 11:31:17 crc kubenswrapper[4728]: I1205 11:31:17.865462 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7mttx\" (UniqueName: \"kubernetes.io/projected/5679ebbf-dd32-4faa-b195-b7f01365edff-kube-api-access-7mttx\") pod \"ceilometer-0\" (UID: \"5679ebbf-dd32-4faa-b195-b7f01365edff\") " pod="openstack/ceilometer-0"
Dec 05 11:31:17 crc kubenswrapper[4728]: I1205 11:31:17.865961 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5679ebbf-dd32-4faa-b195-b7f01365edff-config-data\") pod \"ceilometer-0\" (UID: \"5679ebbf-dd32-4faa-b195-b7f01365edff\") " pod="openstack/ceilometer-0"
Dec 05 11:31:17 crc kubenswrapper[4728]: I1205 11:31:17.943514 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 11:31:18 crc kubenswrapper[4728]: I1205 11:31:18.370255 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a20bde59-1d28-4f65-8dc4-26808e4abb01" path="/var/lib/kubelet/pods/a20bde59-1d28-4f65-8dc4-26808e4abb01/volumes"
Dec 05 11:31:18 crc kubenswrapper[4728]: I1205 11:31:18.371222 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4" path="/var/lib/kubelet/pods/fb33c4ee-7b55-4da4-8977-7dfd8bf7a0e4/volumes"
Dec 05 11:31:18 crc kubenswrapper[4728]: I1205 11:31:18.406571 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 11:31:18 crc kubenswrapper[4728]: W1205 11:31:18.416344 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5679ebbf_dd32_4faa_b195_b7f01365edff.slice/crio-7ba4bcea6aa258ca5543e116b96248d0b7736678362d7e9f68a2906be6769334 WatchSource:0}: Error finding container 7ba4bcea6aa258ca5543e116b96248d0b7736678362d7e9f68a2906be6769334: Status 404 returned error can't find the container with id 7ba4bcea6aa258ca5543e116b96248d0b7736678362d7e9f68a2906be6769334
Dec 05 11:31:18 crc kubenswrapper[4728]: I1205 11:31:18.588375 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5679ebbf-dd32-4faa-b195-b7f01365edff","Type":"ContainerStarted","Data":"7ba4bcea6aa258ca5543e116b96248d0b7736678362d7e9f68a2906be6769334"}
Dec 05 11:31:18 crc kubenswrapper[4728]: I1205 11:31:18.613362 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"32cf3773-bc1a-4c62-9b1a-8fc95e42e403","Type":"ContainerStarted","Data":"293fb9342c9c04f9ecf3fff9b91cf76b16dac1adfd14b7b773f8a4016d286a05"}
Dec 05 11:31:18 crc kubenswrapper[4728]: I1205 11:31:18.613435 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"32cf3773-bc1a-4c62-9b1a-8fc95e42e403","Type":"ContainerStarted","Data":"e804aba11af8bb563f39a77b308fdf8317718ff57e5bcb1a1f5bc96b1383d657"}
Dec 05 11:31:19 crc kubenswrapper[4728]: I1205 11:31:19.177184 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 11:31:19 crc kubenswrapper[4728]: I1205 11:31:19.640157 4728 generic.go:334] "Generic (PLEG): container finished" podID="c8ca2efe-4d75-46f8-9878-e4f76b8e85f4" containerID="4355c7f8f8c64082fee96e7eaee0da584c497118fd2025378398d3ccddf09e3d" exitCode=0
Dec 05 11:31:19 crc kubenswrapper[4728]: I1205 11:31:19.640254 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c8ca2efe-4d75-46f8-9878-e4f76b8e85f4","Type":"ContainerDied","Data":"4355c7f8f8c64082fee96e7eaee0da584c497118fd2025378398d3ccddf09e3d"}
Dec 05 11:31:19 crc kubenswrapper[4728]: I1205 11:31:19.643373 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"32cf3773-bc1a-4c62-9b1a-8fc95e42e403","Type":"ContainerStarted","Data":"2d0309cbb8a85ac19fe32fd6a103f5e058d457861d403db50f6b9a063039d283"}
Dec 05 11:31:19 crc kubenswrapper[4728]: I1205 11:31:19.671412 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.671388044 podStartE2EDuration="3.671388044s" podCreationTimestamp="2025-12-05 11:31:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:31:19.662770989 +0000 UTC m=+1413.804893692" watchObservedRunningTime="2025-12-05 11:31:19.671388044 +0000 UTC m=+1413.813510737"
Dec 05 11:31:23 crc kubenswrapper[4728]: I1205 11:31:23.619371 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Dec 05 11:31:23 crc kubenswrapper[4728]: I1205 11:31:23.711361 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c8ca2efe-4d75-46f8-9878-e4f76b8e85f4","Type":"ContainerDied","Data":"af91f23b4932b1dc7afe90ba566dde86274ba538be2249b9ada444826eb680dd"}
Dec 05 11:31:23 crc kubenswrapper[4728]: I1205 11:31:23.711416 4728 scope.go:117] "RemoveContainer" containerID="4355c7f8f8c64082fee96e7eaee0da584c497118fd2025378398d3ccddf09e3d"
Dec 05 11:31:23 crc kubenswrapper[4728]: I1205 11:31:23.711560 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Dec 05 11:31:23 crc kubenswrapper[4728]: I1205 11:31:23.766397 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8ca2efe-4d75-46f8-9878-e4f76b8e85f4-combined-ca-bundle\") pod \"c8ca2efe-4d75-46f8-9878-e4f76b8e85f4\" (UID: \"c8ca2efe-4d75-46f8-9878-e4f76b8e85f4\") "
Dec 05 11:31:23 crc kubenswrapper[4728]: I1205 11:31:23.766537 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c8ca2efe-4d75-46f8-9878-e4f76b8e85f4-httpd-run\") pod \"c8ca2efe-4d75-46f8-9878-e4f76b8e85f4\" (UID: \"c8ca2efe-4d75-46f8-9878-e4f76b8e85f4\") "
Dec 05 11:31:23 crc kubenswrapper[4728]: I1205 11:31:23.766597 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c8ca2efe-4d75-46f8-9878-e4f76b8e85f4-logs\") pod \"c8ca2efe-4d75-46f8-9878-e4f76b8e85f4\" (UID: \"c8ca2efe-4d75-46f8-9878-e4f76b8e85f4\") "
Dec 05 11:31:23 crc kubenswrapper[4728]: I1205 11:31:23.766633 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c8ca2efe-4d75-46f8-9878-e4f76b8e85f4-scripts\") pod \"c8ca2efe-4d75-46f8-9878-e4f76b8e85f4\" (UID: \"c8ca2efe-4d75-46f8-9878-e4f76b8e85f4\") "
Dec 05 11:31:23 crc kubenswrapper[4728]: I1205 11:31:23.766681 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vxw46\" (UniqueName: \"kubernetes.io/projected/c8ca2efe-4d75-46f8-9878-e4f76b8e85f4-kube-api-access-vxw46\") pod \"c8ca2efe-4d75-46f8-9878-e4f76b8e85f4\" (UID: \"c8ca2efe-4d75-46f8-9878-e4f76b8e85f4\") "
Dec 05 11:31:23 crc kubenswrapper[4728]: I1205 11:31:23.766703 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8ca2efe-4d75-46f8-9878-e4f76b8e85f4-config-data\") pod \"c8ca2efe-4d75-46f8-9878-e4f76b8e85f4\" (UID: \"c8ca2efe-4d75-46f8-9878-e4f76b8e85f4\") "
Dec 05 11:31:23 crc kubenswrapper[4728]: I1205 11:31:23.766765 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c8ca2efe-4d75-46f8-9878-e4f76b8e85f4-internal-tls-certs\") pod \"c8ca2efe-4d75-46f8-9878-e4f76b8e85f4\" (UID: \"c8ca2efe-4d75-46f8-9878-e4f76b8e85f4\") "
Dec 05 11:31:23 crc kubenswrapper[4728]: I1205 11:31:23.766822 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/c8ca2efe-4d75-46f8-9878-e4f76b8e85f4-ceph\") pod \"c8ca2efe-4d75-46f8-9878-e4f76b8e85f4\" (UID: \"c8ca2efe-4d75-46f8-9878-e4f76b8e85f4\") "
Dec 05 11:31:23 crc kubenswrapper[4728]: I1205 11:31:23.766854 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"c8ca2efe-4d75-46f8-9878-e4f76b8e85f4\" (UID: \"c8ca2efe-4d75-46f8-9878-e4f76b8e85f4\") "
Dec 05 11:31:23 crc kubenswrapper[4728]: I1205 11:31:23.767650 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c8ca2efe-4d75-46f8-9878-e4f76b8e85f4-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "c8ca2efe-4d75-46f8-9878-e4f76b8e85f4" (UID: "c8ca2efe-4d75-46f8-9878-e4f76b8e85f4"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 11:31:23 crc kubenswrapper[4728]: I1205 11:31:23.767894 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c8ca2efe-4d75-46f8-9878-e4f76b8e85f4-logs" (OuterVolumeSpecName: "logs") pod "c8ca2efe-4d75-46f8-9878-e4f76b8e85f4" (UID: "c8ca2efe-4d75-46f8-9878-e4f76b8e85f4"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 11:31:23 crc kubenswrapper[4728]: I1205 11:31:23.779031 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c8ca2efe-4d75-46f8-9878-e4f76b8e85f4-ceph" (OuterVolumeSpecName: "ceph") pod "c8ca2efe-4d75-46f8-9878-e4f76b8e85f4" (UID: "c8ca2efe-4d75-46f8-9878-e4f76b8e85f4"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:31:23 crc kubenswrapper[4728]: I1205 11:31:23.781146 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c8ca2efe-4d75-46f8-9878-e4f76b8e85f4-scripts" (OuterVolumeSpecName: "scripts") pod "c8ca2efe-4d75-46f8-9878-e4f76b8e85f4" (UID: "c8ca2efe-4d75-46f8-9878-e4f76b8e85f4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:31:23 crc kubenswrapper[4728]: I1205 11:31:23.785113 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c8ca2efe-4d75-46f8-9878-e4f76b8e85f4-kube-api-access-vxw46" (OuterVolumeSpecName: "kube-api-access-vxw46") pod "c8ca2efe-4d75-46f8-9878-e4f76b8e85f4" (UID: "c8ca2efe-4d75-46f8-9878-e4f76b8e85f4"). InnerVolumeSpecName "kube-api-access-vxw46". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:31:23 crc kubenswrapper[4728]: I1205 11:31:23.789895 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "glance") pod "c8ca2efe-4d75-46f8-9878-e4f76b8e85f4" (UID: "c8ca2efe-4d75-46f8-9878-e4f76b8e85f4"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Dec 05 11:31:23 crc kubenswrapper[4728]: I1205 11:31:23.811494 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c8ca2efe-4d75-46f8-9878-e4f76b8e85f4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c8ca2efe-4d75-46f8-9878-e4f76b8e85f4" (UID: "c8ca2efe-4d75-46f8-9878-e4f76b8e85f4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:31:23 crc kubenswrapper[4728]: I1205 11:31:23.830556 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c8ca2efe-4d75-46f8-9878-e4f76b8e85f4-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "c8ca2efe-4d75-46f8-9878-e4f76b8e85f4" (UID: "c8ca2efe-4d75-46f8-9878-e4f76b8e85f4"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:31:23 crc kubenswrapper[4728]: I1205 11:31:23.832416 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c8ca2efe-4d75-46f8-9878-e4f76b8e85f4-config-data" (OuterVolumeSpecName: "config-data") pod "c8ca2efe-4d75-46f8-9878-e4f76b8e85f4" (UID: "c8ca2efe-4d75-46f8-9878-e4f76b8e85f4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:31:23 crc kubenswrapper[4728]: I1205 11:31:23.869403 4728 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8ca2efe-4d75-46f8-9878-e4f76b8e85f4-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 11:31:23 crc kubenswrapper[4728]: I1205 11:31:23.869449 4728 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c8ca2efe-4d75-46f8-9878-e4f76b8e85f4-httpd-run\") on node \"crc\" DevicePath \"\""
Dec 05 11:31:23 crc kubenswrapper[4728]: I1205 11:31:23.869459 4728 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c8ca2efe-4d75-46f8-9878-e4f76b8e85f4-logs\") on node \"crc\" DevicePath \"\""
Dec 05 11:31:23 crc kubenswrapper[4728]: I1205 11:31:23.869469 4728 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c8ca2efe-4d75-46f8-9878-e4f76b8e85f4-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 11:31:23 crc kubenswrapper[4728]: I1205 11:31:23.869477 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vxw46\" (UniqueName: \"kubernetes.io/projected/c8ca2efe-4d75-46f8-9878-e4f76b8e85f4-kube-api-access-vxw46\") on node \"crc\" DevicePath \"\""
Dec 05 11:31:23 crc kubenswrapper[4728]: I1205 11:31:23.869489 4728 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8ca2efe-4d75-46f8-9878-e4f76b8e85f4-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 11:31:23 crc kubenswrapper[4728]: I1205 11:31:23.869497 4728 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c8ca2efe-4d75-46f8-9878-e4f76b8e85f4-internal-tls-certs\") on node \"crc\" DevicePath \"\""
Dec 05 11:31:23 crc kubenswrapper[4728]: I1205 11:31:23.869504 4728 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/c8ca2efe-4d75-46f8-9878-e4f76b8e85f4-ceph\") on node \"crc\" DevicePath \"\""
Dec 05 11:31:23 crc kubenswrapper[4728]: I1205 11:31:23.869541 4728 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" "
Dec 05 11:31:23 crc kubenswrapper[4728]: I1205 11:31:23.890174 4728 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc"
Dec 05 11:31:23 crc kubenswrapper[4728]: I1205 11:31:23.918273 4728 scope.go:117] "RemoveContainer" containerID="20438e594ebc0eb329f8e3f19d1a4a98d23077a99329c007cc5b01d1469da530"
Dec 05 11:31:23 crc kubenswrapper[4728]: I1205 11:31:23.971861 4728 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\""
Dec 05 11:31:24 crc kubenswrapper[4728]: I1205 11:31:24.055971 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 05 11:31:24 crc kubenswrapper[4728]: I1205 11:31:24.060985 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-scheduler-0"
Dec 05 11:31:24 crc kubenswrapper[4728]: I1205 11:31:24.085862 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 05 11:31:24 crc kubenswrapper[4728]: I1205 11:31:24.112165 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 05 11:31:24 crc kubenswrapper[4728]: E1205 11:31:24.112535 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8ca2efe-4d75-46f8-9878-e4f76b8e85f4" containerName="glance-httpd"
Dec 05 11:31:24 crc kubenswrapper[4728]: I1205 11:31:24.112556 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8ca2efe-4d75-46f8-9878-e4f76b8e85f4" containerName="glance-httpd"
Dec 05 11:31:24 crc kubenswrapper[4728]: E1205 11:31:24.112570 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8ca2efe-4d75-46f8-9878-e4f76b8e85f4" containerName="glance-log"
Dec 05 11:31:24 crc kubenswrapper[4728]: I1205 11:31:24.112576 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8ca2efe-4d75-46f8-9878-e4f76b8e85f4" containerName="glance-log"
Dec 05 11:31:24 crc kubenswrapper[4728]: I1205 11:31:24.112756 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="c8ca2efe-4d75-46f8-9878-e4f76b8e85f4" containerName="glance-log"
Dec 05 11:31:24 crc kubenswrapper[4728]: I1205 11:31:24.112785 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="c8ca2efe-4d75-46f8-9878-e4f76b8e85f4" containerName="glance-httpd"
Dec 05 11:31:24 crc kubenswrapper[4728]: I1205 11:31:24.113714 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Dec 05 11:31:24 crc kubenswrapper[4728]: I1205 11:31:24.125272 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data"
Dec 05 11:31:24 crc kubenswrapper[4728]: I1205 11:31:24.125482 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc"
Dec 05 11:31:24 crc kubenswrapper[4728]: I1205 11:31:24.151540 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 05 11:31:24 crc kubenswrapper[4728]: I1205 11:31:24.279380 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f030a4aa-1b8c-4889-9385-56c75001c4f5-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"f030a4aa-1b8c-4889-9385-56c75001c4f5\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:31:24 crc kubenswrapper[4728]: I1205 11:31:24.279448 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f030a4aa-1b8c-4889-9385-56c75001c4f5-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f030a4aa-1b8c-4889-9385-56c75001c4f5\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:31:24 crc kubenswrapper[4728]: I1205 11:31:24.279484 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f030a4aa-1b8c-4889-9385-56c75001c4f5-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f030a4aa-1b8c-4889-9385-56c75001c4f5\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:31:24 crc kubenswrapper[4728]: I1205 11:31:24.279605 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"f030a4aa-1b8c-4889-9385-56c75001c4f5\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:31:24 crc kubenswrapper[4728]: I1205 11:31:24.280278 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f030a4aa-1b8c-4889-9385-56c75001c4f5-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f030a4aa-1b8c-4889-9385-56c75001c4f5\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:31:24 crc kubenswrapper[4728]: I1205 11:31:24.280368 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f030a4aa-1b8c-4889-9385-56c75001c4f5-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f030a4aa-1b8c-4889-9385-56c75001c4f5\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:31:24 crc kubenswrapper[4728]: I1205 11:31:24.280540 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8hlkd\" (UniqueName: \"kubernetes.io/projected/f030a4aa-1b8c-4889-9385-56c75001c4f5-kube-api-access-8hlkd\") pod \"glance-default-internal-api-0\" (UID: \"f030a4aa-1b8c-4889-9385-56c75001c4f5\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:31:24 crc kubenswrapper[4728]: I1205 11:31:24.280600 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/f030a4aa-1b8c-4889-9385-56c75001c4f5-ceph\") pod \"glance-default-internal-api-0\" (UID: \"f030a4aa-1b8c-4889-9385-56c75001c4f5\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:31:24 crc kubenswrapper[4728]: I1205 11:31:24.280636 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f030a4aa-1b8c-4889-9385-56c75001c4f5-logs\") pod \"glance-default-internal-api-0\" (UID: \"f030a4aa-1b8c-4889-9385-56c75001c4f5\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:31:24 crc kubenswrapper[4728]: I1205 11:31:24.366419 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c8ca2efe-4d75-46f8-9878-e4f76b8e85f4" path="/var/lib/kubelet/pods/c8ca2efe-4d75-46f8-9878-e4f76b8e85f4/volumes"
Dec 05 11:31:24 crc kubenswrapper[4728]: I1205 11:31:24.381956 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f030a4aa-1b8c-4889-9385-56c75001c4f5-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"f030a4aa-1b8c-4889-9385-56c75001c4f5\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:31:24 crc kubenswrapper[4728]: I1205 11:31:24.382001 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f030a4aa-1b8c-4889-9385-56c75001c4f5-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f030a4aa-1b8c-4889-9385-56c75001c4f5\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:31:24 crc kubenswrapper[4728]: I1205 11:31:24.382029 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f030a4aa-1b8c-4889-9385-56c75001c4f5-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f030a4aa-1b8c-4889-9385-56c75001c4f5\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:31:24 crc kubenswrapper[4728]: I1205 11:31:24.382060 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"f030a4aa-1b8c-4889-9385-56c75001c4f5\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:31:24 crc kubenswrapper[4728]: I1205 11:31:24.382114 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f030a4aa-1b8c-4889-9385-56c75001c4f5-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f030a4aa-1b8c-4889-9385-56c75001c4f5\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:31:24 crc kubenswrapper[4728]: I1205 11:31:24.382135 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f030a4aa-1b8c-4889-9385-56c75001c4f5-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f030a4aa-1b8c-4889-9385-56c75001c4f5\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:31:24 crc kubenswrapper[4728]: I1205 11:31:24.382186 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8hlkd\" (UniqueName: \"kubernetes.io/projected/f030a4aa-1b8c-4889-9385-56c75001c4f5-kube-api-access-8hlkd\") pod \"glance-default-internal-api-0\" (UID: \"f030a4aa-1b8c-4889-9385-56c75001c4f5\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:31:24 crc kubenswrapper[4728]: I1205 11:31:24.382207 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/f030a4aa-1b8c-4889-9385-56c75001c4f5-ceph\") pod \"glance-default-internal-api-0\" (UID: \"f030a4aa-1b8c-4889-9385-56c75001c4f5\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:31:24 crc kubenswrapper[4728]: I1205 11:31:24.382226 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f030a4aa-1b8c-4889-9385-56c75001c4f5-logs\") pod \"glance-default-internal-api-0\" (UID: \"f030a4aa-1b8c-4889-9385-56c75001c4f5\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:31:24 crc kubenswrapper[4728]: I1205 11:31:24.383045 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f030a4aa-1b8c-4889-9385-56c75001c4f5-logs\") pod \"glance-default-internal-api-0\" (UID: \"f030a4aa-1b8c-4889-9385-56c75001c4f5\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:31:24 crc kubenswrapper[4728]: I1205 11:31:24.384224 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f030a4aa-1b8c-4889-9385-56c75001c4f5-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f030a4aa-1b8c-4889-9385-56c75001c4f5\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:31:24 crc kubenswrapper[4728]: I1205 11:31:24.384226 4728 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"f030a4aa-1b8c-4889-9385-56c75001c4f5\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/glance-default-internal-api-0"
Dec 05 11:31:24 crc kubenswrapper[4728]: I1205 11:31:24.389263 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f030a4aa-1b8c-4889-9385-56c75001c4f5-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f030a4aa-1b8c-4889-9385-56c75001c4f5\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:31:24 crc kubenswrapper[4728]: I1205 11:31:24.390024 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/f030a4aa-1b8c-4889-9385-56c75001c4f5-ceph\") pod \"glance-default-internal-api-0\" (UID: \"f030a4aa-1b8c-4889-9385-56c75001c4f5\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:31:24 crc kubenswrapper[4728]: I1205 11:31:24.390735 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f030a4aa-1b8c-4889-9385-56c75001c4f5-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f030a4aa-1b8c-4889-9385-56c75001c4f5\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:31:24 crc kubenswrapper[4728]: I1205 11:31:24.402490 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f030a4aa-1b8c-4889-9385-56c75001c4f5-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"f030a4aa-1b8c-4889-9385-56c75001c4f5\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:31:24 crc kubenswrapper[4728]: I1205 11:31:24.406383 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f030a4aa-1b8c-4889-9385-56c75001c4f5-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f030a4aa-1b8c-4889-9385-56c75001c4f5\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:31:24 crc kubenswrapper[4728]: I1205 11:31:24.409853 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8hlkd\" (UniqueName: \"kubernetes.io/projected/f030a4aa-1b8c-4889-9385-56c75001c4f5-kube-api-access-8hlkd\") pod \"glance-default-internal-api-0\" (UID: \"f030a4aa-1b8c-4889-9385-56c75001c4f5\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:31:24 crc kubenswrapper[4728]: I1205 11:31:24.448070 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"f030a4aa-1b8c-4889-9385-56c75001c4f5\") " pod="openstack/glance-default-internal-api-0"
Dec 05 11:31:24 crc kubenswrapper[4728]: I1205 11:31:24.461124 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Dec 05 11:31:24 crc kubenswrapper[4728]: I1205 11:31:24.722724 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"d203e1b2-68bf-458f-b1de-b590da34a559","Type":"ContainerStarted","Data":"622698247e7db7a3b73bfd3ef61d99c7db074177bb5fe827a02188c26d467ee7"}
Dec 05 11:31:24 crc kubenswrapper[4728]: I1205 11:31:24.724913 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5679ebbf-dd32-4faa-b195-b7f01365edff","Type":"ContainerStarted","Data":"d13d8a8ab7d85ed57b5ad11712a20d8712b389667e5a8db5efddeaf18fbb15a7"}
Dec 05 11:31:24 crc kubenswrapper[4728]: I1205 11:31:24.732031 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-9kplx" event={"ID":"520798b6-b938-403f-adc6-5609f6bcfd72","Type":"ContainerStarted","Data":"2b42f847a28395ea15946363dc65dd757c05238163f8daf33a3faab37cc4c804"}
Dec 05 11:31:24 crc kubenswrapper[4728]: I1205 11:31:24.768133 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-9kplx" podStartSLOduration=2.167551227 podStartE2EDuration="11.768110691s" podCreationTimestamp="2025-12-05 11:31:13 +0000 UTC" firstStartedPulling="2025-12-05 11:31:13.900992131 +0000 UTC m=+1408.043114824" lastFinishedPulling="2025-12-05 11:31:23.501551565 +0000 UTC m=+1417.643674288" observedRunningTime="2025-12-05 11:31:24.755039554 +0000 UTC m=+1418.897162247" watchObservedRunningTime="2025-12-05 11:31:24.768110691 +0000 UTC m=+1418.910233394"
Dec 05 11:31:25 crc kubenswrapper[4728]: W1205 11:31:25.063569 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf030a4aa_1b8c_4889_9385_56c75001c4f5.slice/crio-6b81a71c5022b970262c5637f10a9a51298ee67830e4bbb69f8f7f6b7774a0c6 WatchSource:0}: Error finding container 6b81a71c5022b970262c5637f10a9a51298ee67830e4bbb69f8f7f6b7774a0c6: Status 404 returned error can't find the container with id 6b81a71c5022b970262c5637f10a9a51298ee67830e4bbb69f8f7f6b7774a0c6
Dec 05 11:31:25 crc kubenswrapper[4728]: I1205 11:31:25.075081 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 05 11:31:25 crc kubenswrapper[4728]: I1205 11:31:25.702134 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 11:31:25 crc kubenswrapper[4728]: I1205 11:31:25.702400 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 11:31:25 crc kubenswrapper[4728]: I1205 11:31:25.752289 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"d203e1b2-68bf-458f-b1de-b590da34a559","Type":"ContainerStarted","Data":"c3fb85e00c97f4d31dc6a1e05e8ab0b0634263b2d45d3618e3079b9f694464cd"}
Dec 05 11:31:25 crc kubenswrapper[4728]: I1205 11:31:25.756073 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f030a4aa-1b8c-4889-9385-56c75001c4f5","Type":"ContainerStarted","Data":"6864b960283818e2861e439f73ed175770e3d97709916f91bc0c62b69feffcf6"}
Dec 05 11:31:25 crc kubenswrapper[4728]: I1205 11:31:25.756114 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f030a4aa-1b8c-4889-9385-56c75001c4f5","Type":"ContainerStarted","Data":"6b81a71c5022b970262c5637f10a9a51298ee67830e4bbb69f8f7f6b7774a0c6"}
Dec 05 11:31:25 crc kubenswrapper[4728]: I1205 11:31:25.763896 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5679ebbf-dd32-4faa-b195-b7f01365edff","Type":"ContainerStarted","Data":"2c378492169ae2028fab5b59f71495b0da2eedebdd02dfc8eb627a9872673daa"}
Dec 05 11:31:25 crc kubenswrapper[4728]: I1205 11:31:25.775710 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-share-share1-0" podStartSLOduration=4.227413792 podStartE2EDuration="38.775691661s" podCreationTimestamp="2025-12-05 11:30:47 +0000 UTC" firstStartedPulling="2025-12-05 11:30:48.925936139 +0000 UTC m=+1383.068058832" lastFinishedPulling="2025-12-05 11:31:23.474213998 +0000 UTC m=+1417.616336701" observedRunningTime="2025-12-05 11:31:25.771895397 +0000 UTC m=+1419.914018090" watchObservedRunningTime="2025-12-05 11:31:25.775691661 +0000 UTC m=+1419.917814354"
Dec 05 11:31:26 crc kubenswrapper[4728]: I1205 11:31:26.776518 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f030a4aa-1b8c-4889-9385-56c75001c4f5","Type":"ContainerStarted","Data":"aaedf9cc1cb2c88d71887f94a22a645aa34e1874611f055b99d9621e43dfebb2"}
Dec 05 11:31:26 crc kubenswrapper[4728]: I1205 11:31:26.783532 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5679ebbf-dd32-4faa-b195-b7f01365edff","Type":"ContainerStarted","Data":"b12561be0ee58441f0f81504b007dc487767744c68307cd9fa1b8d9c2555f8f7"}
Dec 05 11:31:26 crc kubenswrapper[4728]: I1205 11:31:26.812000 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=2.8119832049999998 podStartE2EDuration="2.811983205s" podCreationTimestamp="2025-12-05 11:31:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:31:26.803781761 +0000 UTC m=+1420.945904454" watchObservedRunningTime="2025-12-05 11:31:26.811983205 +0000 UTC m=+1420.954105898"
Dec 05 11:31:26 crc kubenswrapper[4728]: I1205 11:31:26.926461 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0"
Dec 05 11:31:26 crc kubenswrapper[4728]: I1205 11:31:26.926501 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0"
Dec 05 11:31:26 crc kubenswrapper[4728]: I1205 11:31:26.963751 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0"
Dec 05 11:31:26 crc kubenswrapper[4728]: I1205 11:31:26.990622 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0"
Dec 05 11:31:27 crc kubenswrapper[4728]: I1205 11:31:27.816060 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5679ebbf-dd32-4faa-b195-b7f01365edff" containerName="ceilometer-central-agent" containerID="cri-o://d13d8a8ab7d85ed57b5ad11712a20d8712b389667e5a8db5efddeaf18fbb15a7" gracePeriod=30
Dec 05 11:31:27 crc kubenswrapper[4728]: I1205 11:31:27.816926 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5679ebbf-dd32-4faa-b195-b7f01365edff" containerName="proxy-httpd" containerID="cri-o://fb31923c03f8745f5babcb1bda0b7a526e998a658137a6688d94cdf7c455038a" gracePeriod=30
Dec 05 11:31:27 crc kubenswrapper[4728]: I1205 11:31:27.816978 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5679ebbf-dd32-4faa-b195-b7f01365edff" containerName="sg-core" containerID="cri-o://b12561be0ee58441f0f81504b007dc487767744c68307cd9fa1b8d9c2555f8f7" gracePeriod=30
Dec 05 11:31:27 crc kubenswrapper[4728]: I1205 11:31:27.817011 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5679ebbf-dd32-4faa-b195-b7f01365edff" containerName="ceilometer-notification-agent" containerID="cri-o://2c378492169ae2028fab5b59f71495b0da2eedebdd02dfc8eb627a9872673daa" gracePeriod=30
Dec 05 11:31:27 crc kubenswrapper[4728]: I1205 11:31:27.818218 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5679ebbf-dd32-4faa-b195-b7f01365edff","Type":"ContainerStarted","Data":"fb31923c03f8745f5babcb1bda0b7a526e998a658137a6688d94cdf7c455038a"}
Dec 05 11:31:27 crc kubenswrapper[4728]: I1205 11:31:27.818303 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Dec 05 11:31:27 crc kubenswrapper[4728]: I1205 11:31:27.818329 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0"
Dec 05 11:31:27 crc kubenswrapper[4728]: I1205 11:31:27.818484 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0"
Dec 05 11:31:27 crc kubenswrapper[4728]: I1205 11:31:27.844765 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.286823316 podStartE2EDuration="10.844739503s" podCreationTimestamp="2025-12-05 11:31:17 +0000 UTC" firstStartedPulling="2025-12-05 11:31:18.419698824 +0000 UTC m=+1412.561821517" lastFinishedPulling="2025-12-05 11:31:26.977615011 +0000 UTC m=+1421.119737704" observedRunningTime="2025-12-05 11:31:27.838129643 +0000 UTC m=+1421.980252356" watchObservedRunningTime="2025-12-05 11:31:27.844739503 +0000 UTC m=+1421.986862216"
Dec 05 11:31:28 crc kubenswrapper[4728]: I1205 11:31:28.163415 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-share-share1-0"
Dec 05 11:31:28 crc kubenswrapper[4728]: I1205 11:31:28.829755 4728 generic.go:334] "Generic (PLEG): container finished" podID="5679ebbf-dd32-4faa-b195-b7f01365edff" containerID="fb31923c03f8745f5babcb1bda0b7a526e998a658137a6688d94cdf7c455038a" exitCode=0
Dec 05 11:31:28 crc kubenswrapper[4728]: I1205 11:31:28.830173 4728 generic.go:334] "Generic (PLEG): container finished" podID="5679ebbf-dd32-4faa-b195-b7f01365edff" containerID="b12561be0ee58441f0f81504b007dc487767744c68307cd9fa1b8d9c2555f8f7" exitCode=2
Dec 05 11:31:28 crc kubenswrapper[4728]: I1205 11:31:28.830195 4728 generic.go:334] "Generic (PLEG): container finished" podID="5679ebbf-dd32-4faa-b195-b7f01365edff"
containerID="2c378492169ae2028fab5b59f71495b0da2eedebdd02dfc8eb627a9872673daa" exitCode=0 Dec 05 11:31:28 crc kubenswrapper[4728]: I1205 11:31:28.829777 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5679ebbf-dd32-4faa-b195-b7f01365edff","Type":"ContainerDied","Data":"fb31923c03f8745f5babcb1bda0b7a526e998a658137a6688d94cdf7c455038a"} Dec 05 11:31:28 crc kubenswrapper[4728]: I1205 11:31:28.830251 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5679ebbf-dd32-4faa-b195-b7f01365edff","Type":"ContainerDied","Data":"b12561be0ee58441f0f81504b007dc487767744c68307cd9fa1b8d9c2555f8f7"} Dec 05 11:31:28 crc kubenswrapper[4728]: I1205 11:31:28.830265 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5679ebbf-dd32-4faa-b195-b7f01365edff","Type":"ContainerDied","Data":"2c378492169ae2028fab5b59f71495b0da2eedebdd02dfc8eb627a9872673daa"} Dec 05 11:31:29 crc kubenswrapper[4728]: I1205 11:31:29.762643 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 05 11:31:29 crc kubenswrapper[4728]: I1205 11:31:29.838846 4728 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 11:31:29 crc kubenswrapper[4728]: I1205 11:31:29.917528 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 05 11:31:34 crc kubenswrapper[4728]: I1205 11:31:34.462879 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 05 11:31:34 crc kubenswrapper[4728]: I1205 11:31:34.463399 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 05 11:31:34 crc kubenswrapper[4728]: I1205 11:31:34.499969 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 05 11:31:34 crc kubenswrapper[4728]: I1205 11:31:34.502264 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 05 11:31:34 crc kubenswrapper[4728]: I1205 11:31:34.892590 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 05 11:31:34 crc kubenswrapper[4728]: I1205 11:31:34.893027 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 05 11:31:35 crc kubenswrapper[4728]: I1205 11:31:35.987979 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-scheduler-0" Dec 05 11:31:36 crc kubenswrapper[4728]: I1205 11:31:36.524132 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 11:31:36 crc kubenswrapper[4728]: I1205 11:31:36.652686 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5679ebbf-dd32-4faa-b195-b7f01365edff-combined-ca-bundle\") pod \"5679ebbf-dd32-4faa-b195-b7f01365edff\" (UID: \"5679ebbf-dd32-4faa-b195-b7f01365edff\") " Dec 05 11:31:36 crc kubenswrapper[4728]: I1205 11:31:36.652759 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7mttx\" (UniqueName: \"kubernetes.io/projected/5679ebbf-dd32-4faa-b195-b7f01365edff-kube-api-access-7mttx\") pod \"5679ebbf-dd32-4faa-b195-b7f01365edff\" (UID: \"5679ebbf-dd32-4faa-b195-b7f01365edff\") " Dec 05 11:31:36 crc kubenswrapper[4728]: I1205 11:31:36.652911 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5679ebbf-dd32-4faa-b195-b7f01365edff-run-httpd\") pod \"5679ebbf-dd32-4faa-b195-b7f01365edff\" (UID: \"5679ebbf-dd32-4faa-b195-b7f01365edff\") " Dec 05 11:31:36 crc kubenswrapper[4728]: I1205 11:31:36.652965 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5679ebbf-dd32-4faa-b195-b7f01365edff-sg-core-conf-yaml\") pod \"5679ebbf-dd32-4faa-b195-b7f01365edff\" (UID: \"5679ebbf-dd32-4faa-b195-b7f01365edff\") " Dec 05 11:31:36 crc kubenswrapper[4728]: I1205 11:31:36.652991 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5679ebbf-dd32-4faa-b195-b7f01365edff-scripts\") pod \"5679ebbf-dd32-4faa-b195-b7f01365edff\" (UID: \"5679ebbf-dd32-4faa-b195-b7f01365edff\") " Dec 05 11:31:36 crc kubenswrapper[4728]: I1205 11:31:36.653011 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5679ebbf-dd32-4faa-b195-b7f01365edff-log-httpd\") pod \"5679ebbf-dd32-4faa-b195-b7f01365edff\" (UID: \"5679ebbf-dd32-4faa-b195-b7f01365edff\") " Dec 05 11:31:36 crc kubenswrapper[4728]: I1205 11:31:36.653037 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5679ebbf-dd32-4faa-b195-b7f01365edff-config-data\") pod \"5679ebbf-dd32-4faa-b195-b7f01365edff\" (UID: \"5679ebbf-dd32-4faa-b195-b7f01365edff\") " Dec 05 11:31:36 crc kubenswrapper[4728]: I1205 11:31:36.653395 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5679ebbf-dd32-4faa-b195-b7f01365edff-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "5679ebbf-dd32-4faa-b195-b7f01365edff" (UID: "5679ebbf-dd32-4faa-b195-b7f01365edff"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:31:36 crc kubenswrapper[4728]: I1205 11:31:36.654452 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5679ebbf-dd32-4faa-b195-b7f01365edff-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "5679ebbf-dd32-4faa-b195-b7f01365edff" (UID: "5679ebbf-dd32-4faa-b195-b7f01365edff"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:31:36 crc kubenswrapper[4728]: I1205 11:31:36.659062 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5679ebbf-dd32-4faa-b195-b7f01365edff-scripts" (OuterVolumeSpecName: "scripts") pod "5679ebbf-dd32-4faa-b195-b7f01365edff" (UID: "5679ebbf-dd32-4faa-b195-b7f01365edff"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:31:36 crc kubenswrapper[4728]: I1205 11:31:36.660063 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5679ebbf-dd32-4faa-b195-b7f01365edff-kube-api-access-7mttx" (OuterVolumeSpecName: "kube-api-access-7mttx") pod "5679ebbf-dd32-4faa-b195-b7f01365edff" (UID: "5679ebbf-dd32-4faa-b195-b7f01365edff"). InnerVolumeSpecName "kube-api-access-7mttx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:31:36 crc kubenswrapper[4728]: I1205 11:31:36.684097 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5679ebbf-dd32-4faa-b195-b7f01365edff-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "5679ebbf-dd32-4faa-b195-b7f01365edff" (UID: "5679ebbf-dd32-4faa-b195-b7f01365edff"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:31:36 crc kubenswrapper[4728]: I1205 11:31:36.742047 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5679ebbf-dd32-4faa-b195-b7f01365edff-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5679ebbf-dd32-4faa-b195-b7f01365edff" (UID: "5679ebbf-dd32-4faa-b195-b7f01365edff"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:31:36 crc kubenswrapper[4728]: I1205 11:31:36.755132 4728 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5679ebbf-dd32-4faa-b195-b7f01365edff-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:36 crc kubenswrapper[4728]: I1205 11:31:36.755170 4728 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5679ebbf-dd32-4faa-b195-b7f01365edff-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:36 crc kubenswrapper[4728]: I1205 11:31:36.755181 4728 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5679ebbf-dd32-4faa-b195-b7f01365edff-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:36 crc kubenswrapper[4728]: I1205 11:31:36.755192 4728 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5679ebbf-dd32-4faa-b195-b7f01365edff-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:36 crc kubenswrapper[4728]: I1205 11:31:36.755201 4728 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5679ebbf-dd32-4faa-b195-b7f01365edff-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:36 crc kubenswrapper[4728]: I1205 11:31:36.755209 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7mttx\" (UniqueName: \"kubernetes.io/projected/5679ebbf-dd32-4faa-b195-b7f01365edff-kube-api-access-7mttx\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:36 crc kubenswrapper[4728]: I1205 11:31:36.771813 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for 
volume "kubernetes.io/secret/5679ebbf-dd32-4faa-b195-b7f01365edff-config-data" (OuterVolumeSpecName: "config-data") pod "5679ebbf-dd32-4faa-b195-b7f01365edff" (UID: "5679ebbf-dd32-4faa-b195-b7f01365edff"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:31:36 crc kubenswrapper[4728]: I1205 11:31:36.856835 4728 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5679ebbf-dd32-4faa-b195-b7f01365edff-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:36 crc kubenswrapper[4728]: I1205 11:31:36.915835 4728 generic.go:334] "Generic (PLEG): container finished" podID="5679ebbf-dd32-4faa-b195-b7f01365edff" containerID="d13d8a8ab7d85ed57b5ad11712a20d8712b389667e5a8db5efddeaf18fbb15a7" exitCode=0 Dec 05 11:31:36 crc kubenswrapper[4728]: I1205 11:31:36.915875 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5679ebbf-dd32-4faa-b195-b7f01365edff","Type":"ContainerDied","Data":"d13d8a8ab7d85ed57b5ad11712a20d8712b389667e5a8db5efddeaf18fbb15a7"} Dec 05 11:31:36 crc kubenswrapper[4728]: I1205 11:31:36.915901 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5679ebbf-dd32-4faa-b195-b7f01365edff","Type":"ContainerDied","Data":"7ba4bcea6aa258ca5543e116b96248d0b7736678362d7e9f68a2906be6769334"} Dec 05 11:31:36 crc kubenswrapper[4728]: I1205 11:31:36.915919 4728 scope.go:117] "RemoveContainer" containerID="fb31923c03f8745f5babcb1bda0b7a526e998a658137a6688d94cdf7c455038a" Dec 05 11:31:36 crc kubenswrapper[4728]: I1205 11:31:36.916049 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 11:31:36 crc kubenswrapper[4728]: I1205 11:31:36.957078 4728 scope.go:117] "RemoveContainer" containerID="b12561be0ee58441f0f81504b007dc487767744c68307cd9fa1b8d9c2555f8f7" Dec 05 11:31:36 crc kubenswrapper[4728]: I1205 11:31:36.972074 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:31:36 crc kubenswrapper[4728]: I1205 11:31:36.982429 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:31:36 crc kubenswrapper[4728]: I1205 11:31:36.993071 4728 scope.go:117] "RemoveContainer" containerID="2c378492169ae2028fab5b59f71495b0da2eedebdd02dfc8eb627a9872673daa" Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.006424 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:31:37 crc kubenswrapper[4728]: E1205 11:31:37.006898 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5679ebbf-dd32-4faa-b195-b7f01365edff" containerName="sg-core" Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.006920 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="5679ebbf-dd32-4faa-b195-b7f01365edff" containerName="sg-core" Dec 05 11:31:37 crc kubenswrapper[4728]: E1205 11:31:37.006936 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5679ebbf-dd32-4faa-b195-b7f01365edff" containerName="ceilometer-notification-agent" Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.006944 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="5679ebbf-dd32-4faa-b195-b7f01365edff" containerName="ceilometer-notification-agent" Dec 05 11:31:37 crc kubenswrapper[4728]: E1205 11:31:37.006976 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5679ebbf-dd32-4faa-b195-b7f01365edff" 
containerName="ceilometer-central-agent" Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.006983 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="5679ebbf-dd32-4faa-b195-b7f01365edff" containerName="ceilometer-central-agent" Dec 05 11:31:37 crc kubenswrapper[4728]: E1205 11:31:37.006997 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5679ebbf-dd32-4faa-b195-b7f01365edff" containerName="proxy-httpd" Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.007004 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="5679ebbf-dd32-4faa-b195-b7f01365edff" containerName="proxy-httpd" Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.007190 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="5679ebbf-dd32-4faa-b195-b7f01365edff" containerName="sg-core" Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.007210 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="5679ebbf-dd32-4faa-b195-b7f01365edff" containerName="ceilometer-central-agent" Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.007225 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="5679ebbf-dd32-4faa-b195-b7f01365edff" containerName="ceilometer-notification-agent" Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.007241 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="5679ebbf-dd32-4faa-b195-b7f01365edff" containerName="proxy-httpd" Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.009179 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.013263 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.013434 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.021414 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.041949 4728 scope.go:117] "RemoveContainer" containerID="d13d8a8ab7d85ed57b5ad11712a20d8712b389667e5a8db5efddeaf18fbb15a7" Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.059994 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/693a6cc7-eba1-4d9d-8c6f-cd61801f7a51-run-httpd\") pod \"ceilometer-0\" (UID: \"693a6cc7-eba1-4d9d-8c6f-cd61801f7a51\") " pod="openstack/ceilometer-0" Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.060059 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r8p29\" (UniqueName: \"kubernetes.io/projected/693a6cc7-eba1-4d9d-8c6f-cd61801f7a51-kube-api-access-r8p29\") pod \"ceilometer-0\" (UID: \"693a6cc7-eba1-4d9d-8c6f-cd61801f7a51\") " pod="openstack/ceilometer-0" Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.060122 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/693a6cc7-eba1-4d9d-8c6f-cd61801f7a51-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"693a6cc7-eba1-4d9d-8c6f-cd61801f7a51\") " pod="openstack/ceilometer-0" Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.060155 4728 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/693a6cc7-eba1-4d9d-8c6f-cd61801f7a51-config-data\") pod \"ceilometer-0\" (UID: \"693a6cc7-eba1-4d9d-8c6f-cd61801f7a51\") " pod="openstack/ceilometer-0" Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.060193 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/693a6cc7-eba1-4d9d-8c6f-cd61801f7a51-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"693a6cc7-eba1-4d9d-8c6f-cd61801f7a51\") " pod="openstack/ceilometer-0" Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.060226 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/693a6cc7-eba1-4d9d-8c6f-cd61801f7a51-log-httpd\") pod \"ceilometer-0\" (UID: \"693a6cc7-eba1-4d9d-8c6f-cd61801f7a51\") " pod="openstack/ceilometer-0" Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.060251 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/693a6cc7-eba1-4d9d-8c6f-cd61801f7a51-scripts\") pod \"ceilometer-0\" (UID: \"693a6cc7-eba1-4d9d-8c6f-cd61801f7a51\") " pod="openstack/ceilometer-0" Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.072442 4728 scope.go:117] "RemoveContainer" containerID="fb31923c03f8745f5babcb1bda0b7a526e998a658137a6688d94cdf7c455038a" Dec 05 11:31:37 crc kubenswrapper[4728]: E1205 11:31:37.075830 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fb31923c03f8745f5babcb1bda0b7a526e998a658137a6688d94cdf7c455038a\": container with ID starting with fb31923c03f8745f5babcb1bda0b7a526e998a658137a6688d94cdf7c455038a not found: ID does not exist" containerID="fb31923c03f8745f5babcb1bda0b7a526e998a658137a6688d94cdf7c455038a" Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.075903 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fb31923c03f8745f5babcb1bda0b7a526e998a658137a6688d94cdf7c455038a"} err="failed to get container status \"fb31923c03f8745f5babcb1bda0b7a526e998a658137a6688d94cdf7c455038a\": rpc error: code = NotFound desc = could not find container \"fb31923c03f8745f5babcb1bda0b7a526e998a658137a6688d94cdf7c455038a\": container with ID starting with fb31923c03f8745f5babcb1bda0b7a526e998a658137a6688d94cdf7c455038a not found: ID does not exist" Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.075927 4728 scope.go:117] "RemoveContainer" containerID="b12561be0ee58441f0f81504b007dc487767744c68307cd9fa1b8d9c2555f8f7" Dec 05 11:31:37 crc kubenswrapper[4728]: E1205 11:31:37.077990 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b12561be0ee58441f0f81504b007dc487767744c68307cd9fa1b8d9c2555f8f7\": container with ID starting with b12561be0ee58441f0f81504b007dc487767744c68307cd9fa1b8d9c2555f8f7 not found: ID does not exist" containerID="b12561be0ee58441f0f81504b007dc487767744c68307cd9fa1b8d9c2555f8f7" Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.078032 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b12561be0ee58441f0f81504b007dc487767744c68307cd9fa1b8d9c2555f8f7"} err="failed to get container status 
\"b12561be0ee58441f0f81504b007dc487767744c68307cd9fa1b8d9c2555f8f7\": rpc error: code = NotFound desc = could not find container \"b12561be0ee58441f0f81504b007dc487767744c68307cd9fa1b8d9c2555f8f7\": container with ID starting with b12561be0ee58441f0f81504b007dc487767744c68307cd9fa1b8d9c2555f8f7 not found: ID does not exist" Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.078077 4728 scope.go:117] "RemoveContainer" containerID="2c378492169ae2028fab5b59f71495b0da2eedebdd02dfc8eb627a9872673daa" Dec 05 11:31:37 crc kubenswrapper[4728]: E1205 11:31:37.078326 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2c378492169ae2028fab5b59f71495b0da2eedebdd02dfc8eb627a9872673daa\": container with ID starting with 2c378492169ae2028fab5b59f71495b0da2eedebdd02dfc8eb627a9872673daa not found: ID does not exist" containerID="2c378492169ae2028fab5b59f71495b0da2eedebdd02dfc8eb627a9872673daa" Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.078345 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2c378492169ae2028fab5b59f71495b0da2eedebdd02dfc8eb627a9872673daa"} err="failed to get container status \"2c378492169ae2028fab5b59f71495b0da2eedebdd02dfc8eb627a9872673daa\": rpc error: code = NotFound desc = could not find container \"2c378492169ae2028fab5b59f71495b0da2eedebdd02dfc8eb627a9872673daa\": container with ID starting with 2c378492169ae2028fab5b59f71495b0da2eedebdd02dfc8eb627a9872673daa not found: ID does not exist" Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.078357 4728 scope.go:117] "RemoveContainer" containerID="d13d8a8ab7d85ed57b5ad11712a20d8712b389667e5a8db5efddeaf18fbb15a7" Dec 05 11:31:37 crc kubenswrapper[4728]: E1205 11:31:37.079251 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d13d8a8ab7d85ed57b5ad11712a20d8712b389667e5a8db5efddeaf18fbb15a7\": container with ID starting with d13d8a8ab7d85ed57b5ad11712a20d8712b389667e5a8db5efddeaf18fbb15a7 not found: ID does not exist" containerID="d13d8a8ab7d85ed57b5ad11712a20d8712b389667e5a8db5efddeaf18fbb15a7" Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.079273 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d13d8a8ab7d85ed57b5ad11712a20d8712b389667e5a8db5efddeaf18fbb15a7"} err="failed to get container status \"d13d8a8ab7d85ed57b5ad11712a20d8712b389667e5a8db5efddeaf18fbb15a7\": rpc error: code = NotFound desc = could not find container \"d13d8a8ab7d85ed57b5ad11712a20d8712b389667e5a8db5efddeaf18fbb15a7\": container with ID starting with d13d8a8ab7d85ed57b5ad11712a20d8712b389667e5a8db5efddeaf18fbb15a7 not found: ID does not exist" Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.081999 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:31:37 crc kubenswrapper[4728]: E1205 11:31:37.082852 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle config-data kube-api-access-r8p29 log-httpd run-httpd scripts sg-core-conf-yaml], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/ceilometer-0" podUID="693a6cc7-eba1-4d9d-8c6f-cd61801f7a51" Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.161452 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/693a6cc7-eba1-4d9d-8c6f-cd61801f7a51-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"693a6cc7-eba1-4d9d-8c6f-cd61801f7a51\") " pod="openstack/ceilometer-0" Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.161523 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/693a6cc7-eba1-4d9d-8c6f-cd61801f7a51-config-data\") pod \"ceilometer-0\" (UID: \"693a6cc7-eba1-4d9d-8c6f-cd61801f7a51\") " pod="openstack/ceilometer-0" Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.161572 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/693a6cc7-eba1-4d9d-8c6f-cd61801f7a51-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"693a6cc7-eba1-4d9d-8c6f-cd61801f7a51\") " pod="openstack/ceilometer-0" Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.161610 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/693a6cc7-eba1-4d9d-8c6f-cd61801f7a51-log-httpd\") pod \"ceilometer-0\" (UID: \"693a6cc7-eba1-4d9d-8c6f-cd61801f7a51\") " pod="openstack/ceilometer-0" Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.161633 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/693a6cc7-eba1-4d9d-8c6f-cd61801f7a51-scripts\") pod \"ceilometer-0\" (UID: \"693a6cc7-eba1-4d9d-8c6f-cd61801f7a51\") " pod="openstack/ceilometer-0" Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.161729 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/693a6cc7-eba1-4d9d-8c6f-cd61801f7a51-run-httpd\") pod \"ceilometer-0\" (UID: \"693a6cc7-eba1-4d9d-8c6f-cd61801f7a51\") " pod="openstack/ceilometer-0" Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.161765 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r8p29\" (UniqueName: \"kubernetes.io/projected/693a6cc7-eba1-4d9d-8c6f-cd61801f7a51-kube-api-access-r8p29\") pod \"ceilometer-0\" (UID: \"693a6cc7-eba1-4d9d-8c6f-cd61801f7a51\") " pod="openstack/ceilometer-0" Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.162353 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/693a6cc7-eba1-4d9d-8c6f-cd61801f7a51-log-httpd\") pod \"ceilometer-0\" (UID: \"693a6cc7-eba1-4d9d-8c6f-cd61801f7a51\") " pod="openstack/ceilometer-0" Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.163142 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/693a6cc7-eba1-4d9d-8c6f-cd61801f7a51-run-httpd\") pod \"ceilometer-0\" (UID: \"693a6cc7-eba1-4d9d-8c6f-cd61801f7a51\") " pod="openstack/ceilometer-0" Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.166297 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/693a6cc7-eba1-4d9d-8c6f-cd61801f7a51-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"693a6cc7-eba1-4d9d-8c6f-cd61801f7a51\") " pod="openstack/ceilometer-0" Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.166636 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/693a6cc7-eba1-4d9d-8c6f-cd61801f7a51-config-data\") pod \"ceilometer-0\" (UID: \"693a6cc7-eba1-4d9d-8c6f-cd61801f7a51\") " pod="openstack/ceilometer-0" Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.166640 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/693a6cc7-eba1-4d9d-8c6f-cd61801f7a51-scripts\") pod \"ceilometer-0\" (UID: \"693a6cc7-eba1-4d9d-8c6f-cd61801f7a51\") " pod="openstack/ceilometer-0" Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.169321 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/693a6cc7-eba1-4d9d-8c6f-cd61801f7a51-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"693a6cc7-eba1-4d9d-8c6f-cd61801f7a51\") " pod="openstack/ceilometer-0" Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.178241 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r8p29\" (UniqueName: \"kubernetes.io/projected/693a6cc7-eba1-4d9d-8c6f-cd61801f7a51-kube-api-access-r8p29\") pod \"ceilometer-0\" (UID: \"693a6cc7-eba1-4d9d-8c6f-cd61801f7a51\") " pod="openstack/ceilometer-0" Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.190567 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.190641 4728 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.191954 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.928820 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.940372 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.976240 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/693a6cc7-eba1-4d9d-8c6f-cd61801f7a51-config-data\") pod \"693a6cc7-eba1-4d9d-8c6f-cd61801f7a51\" (UID: \"693a6cc7-eba1-4d9d-8c6f-cd61801f7a51\") " Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.976314 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/693a6cc7-eba1-4d9d-8c6f-cd61801f7a51-log-httpd\") pod \"693a6cc7-eba1-4d9d-8c6f-cd61801f7a51\" (UID: \"693a6cc7-eba1-4d9d-8c6f-cd61801f7a51\") " Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.976346 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/693a6cc7-eba1-4d9d-8c6f-cd61801f7a51-combined-ca-bundle\") pod \"693a6cc7-eba1-4d9d-8c6f-cd61801f7a51\" (UID: \"693a6cc7-eba1-4d9d-8c6f-cd61801f7a51\") " Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.976379 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/693a6cc7-eba1-4d9d-8c6f-cd61801f7a51-scripts\") pod \"693a6cc7-eba1-4d9d-8c6f-cd61801f7a51\" (UID: \"693a6cc7-eba1-4d9d-8c6f-cd61801f7a51\") " Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.976445 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/693a6cc7-eba1-4d9d-8c6f-cd61801f7a51-run-httpd\") pod \"693a6cc7-eba1-4d9d-8c6f-cd61801f7a51\" (UID: \"693a6cc7-eba1-4d9d-8c6f-cd61801f7a51\") " Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.976547 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/693a6cc7-eba1-4d9d-8c6f-cd61801f7a51-sg-core-conf-yaml\") pod \"693a6cc7-eba1-4d9d-8c6f-cd61801f7a51\" (UID: \"693a6cc7-eba1-4d9d-8c6f-cd61801f7a51\") " Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.976620 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r8p29\" (UniqueName: \"kubernetes.io/projected/693a6cc7-eba1-4d9d-8c6f-cd61801f7a51-kube-api-access-r8p29\") pod \"693a6cc7-eba1-4d9d-8c6f-cd61801f7a51\" (UID: \"693a6cc7-eba1-4d9d-8c6f-cd61801f7a51\") " Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.976646 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/693a6cc7-eba1-4d9d-8c6f-cd61801f7a51-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "693a6cc7-eba1-4d9d-8c6f-cd61801f7a51" (UID: "693a6cc7-eba1-4d9d-8c6f-cd61801f7a51"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.977049 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/693a6cc7-eba1-4d9d-8c6f-cd61801f7a51-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "693a6cc7-eba1-4d9d-8c6f-cd61801f7a51" (UID: "693a6cc7-eba1-4d9d-8c6f-cd61801f7a51"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.977863 4728 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/693a6cc7-eba1-4d9d-8c6f-cd61801f7a51-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.992003 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/693a6cc7-eba1-4d9d-8c6f-cd61801f7a51-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "693a6cc7-eba1-4d9d-8c6f-cd61801f7a51" (UID: "693a6cc7-eba1-4d9d-8c6f-cd61801f7a51"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.992020 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/693a6cc7-eba1-4d9d-8c6f-cd61801f7a51-kube-api-access-r8p29" (OuterVolumeSpecName: "kube-api-access-r8p29") pod "693a6cc7-eba1-4d9d-8c6f-cd61801f7a51" (UID: "693a6cc7-eba1-4d9d-8c6f-cd61801f7a51"). InnerVolumeSpecName "kube-api-access-r8p29". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.992036 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/693a6cc7-eba1-4d9d-8c6f-cd61801f7a51-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "693a6cc7-eba1-4d9d-8c6f-cd61801f7a51" (UID: "693a6cc7-eba1-4d9d-8c6f-cd61801f7a51"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.992143 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/693a6cc7-eba1-4d9d-8c6f-cd61801f7a51-scripts" (OuterVolumeSpecName: "scripts") pod "693a6cc7-eba1-4d9d-8c6f-cd61801f7a51" (UID: "693a6cc7-eba1-4d9d-8c6f-cd61801f7a51"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:31:37 crc kubenswrapper[4728]: I1205 11:31:37.993339 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/693a6cc7-eba1-4d9d-8c6f-cd61801f7a51-config-data" (OuterVolumeSpecName: "config-data") pod "693a6cc7-eba1-4d9d-8c6f-cd61801f7a51" (UID: "693a6cc7-eba1-4d9d-8c6f-cd61801f7a51"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:31:38 crc kubenswrapper[4728]: I1205 11:31:38.081293 4728 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/693a6cc7-eba1-4d9d-8c6f-cd61801f7a51-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:38 crc kubenswrapper[4728]: I1205 11:31:38.081363 4728 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/693a6cc7-eba1-4d9d-8c6f-cd61801f7a51-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:38 crc kubenswrapper[4728]: I1205 11:31:38.081376 4728 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/693a6cc7-eba1-4d9d-8c6f-cd61801f7a51-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:38 crc kubenswrapper[4728]: I1205 11:31:38.081391 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r8p29\" (UniqueName: \"kubernetes.io/projected/693a6cc7-eba1-4d9d-8c6f-cd61801f7a51-kube-api-access-r8p29\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:38 crc kubenswrapper[4728]: I1205 11:31:38.081403 4728 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/693a6cc7-eba1-4d9d-8c6f-cd61801f7a51-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:38 crc kubenswrapper[4728]: I1205 11:31:38.081414 4728 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/693a6cc7-eba1-4d9d-8c6f-cd61801f7a51-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:38 crc kubenswrapper[4728]: I1205 11:31:38.364299 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5679ebbf-dd32-4faa-b195-b7f01365edff" path="/var/lib/kubelet/pods/5679ebbf-dd32-4faa-b195-b7f01365edff/volumes" Dec 05 11:31:38 crc kubenswrapper[4728]: I1205 11:31:38.943408 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-9kplx" event={"ID":"520798b6-b938-403f-adc6-5609f6bcfd72","Type":"ContainerDied","Data":"2b42f847a28395ea15946363dc65dd757c05238163f8daf33a3faab37cc4c804"} Dec 05 11:31:38 crc kubenswrapper[4728]: I1205 11:31:38.943348 4728 generic.go:334] "Generic (PLEG): container finished" podID="520798b6-b938-403f-adc6-5609f6bcfd72" containerID="2b42f847a28395ea15946363dc65dd757c05238163f8daf33a3faab37cc4c804" exitCode=0 Dec 05 11:31:38 crc kubenswrapper[4728]: I1205 11:31:38.944459 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 11:31:39 crc kubenswrapper[4728]: I1205 11:31:39.019077 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:31:39 crc kubenswrapper[4728]: I1205 11:31:39.030252 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:31:39 crc kubenswrapper[4728]: I1205 11:31:39.057822 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:31:39 crc kubenswrapper[4728]: I1205 11:31:39.060146 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 11:31:39 crc kubenswrapper[4728]: I1205 11:31:39.065885 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 11:31:39 crc kubenswrapper[4728]: I1205 11:31:39.065940 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 11:31:39 crc kubenswrapper[4728]: I1205 11:31:39.077347 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:31:39 crc kubenswrapper[4728]: I1205 11:31:39.100488 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca1fe40d-770a-49f8-b3cc-d1c43be92be9-config-data\") pod \"ceilometer-0\" (UID: \"ca1fe40d-770a-49f8-b3cc-d1c43be92be9\") " pod="openstack/ceilometer-0" Dec 05 11:31:39 crc kubenswrapper[4728]: I1205 11:31:39.100543 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z2c4x\" (UniqueName: \"kubernetes.io/projected/ca1fe40d-770a-49f8-b3cc-d1c43be92be9-kube-api-access-z2c4x\") pod \"ceilometer-0\" (UID: \"ca1fe40d-770a-49f8-b3cc-d1c43be92be9\") " pod="openstack/ceilometer-0" Dec 05 11:31:39 crc kubenswrapper[4728]: I1205 11:31:39.100670 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ca1fe40d-770a-49f8-b3cc-d1c43be92be9-run-httpd\") pod \"ceilometer-0\" (UID: \"ca1fe40d-770a-49f8-b3cc-d1c43be92be9\") " pod="openstack/ceilometer-0" Dec 05 11:31:39 crc kubenswrapper[4728]: I1205 11:31:39.100692 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca1fe40d-770a-49f8-b3cc-d1c43be92be9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ca1fe40d-770a-49f8-b3cc-d1c43be92be9\") " pod="openstack/ceilometer-0" Dec 05 11:31:39 crc kubenswrapper[4728]: I1205 11:31:39.100733 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ca1fe40d-770a-49f8-b3cc-d1c43be92be9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ca1fe40d-770a-49f8-b3cc-d1c43be92be9\") " pod="openstack/ceilometer-0" Dec 05 11:31:39 crc kubenswrapper[4728]: I1205 11:31:39.100893 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ca1fe40d-770a-49f8-b3cc-d1c43be92be9-log-httpd\") pod \"ceilometer-0\" (UID: \"ca1fe40d-770a-49f8-b3cc-d1c43be92be9\") " pod="openstack/ceilometer-0" Dec 05 11:31:39 crc kubenswrapper[4728]: I1205 11:31:39.101118 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ca1fe40d-770a-49f8-b3cc-d1c43be92be9-scripts\") pod \"ceilometer-0\" (UID: \"ca1fe40d-770a-49f8-b3cc-d1c43be92be9\") " pod="openstack/ceilometer-0" Dec 05 11:31:39 crc kubenswrapper[4728]: I1205 11:31:39.203446 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ca1fe40d-770a-49f8-b3cc-d1c43be92be9-scripts\") pod \"ceilometer-0\" (UID: \"ca1fe40d-770a-49f8-b3cc-d1c43be92be9\") " pod="openstack/ceilometer-0" Dec 05 11:31:39 crc kubenswrapper[4728]: I1205 11:31:39.203578 4728 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca1fe40d-770a-49f8-b3cc-d1c43be92be9-config-data\") pod \"ceilometer-0\" (UID: \"ca1fe40d-770a-49f8-b3cc-d1c43be92be9\") " pod="openstack/ceilometer-0" Dec 05 11:31:39 crc kubenswrapper[4728]: I1205 11:31:39.203607 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z2c4x\" (UniqueName: \"kubernetes.io/projected/ca1fe40d-770a-49f8-b3cc-d1c43be92be9-kube-api-access-z2c4x\") pod \"ceilometer-0\" (UID: \"ca1fe40d-770a-49f8-b3cc-d1c43be92be9\") " pod="openstack/ceilometer-0" Dec 05 11:31:39 crc kubenswrapper[4728]: I1205 11:31:39.203687 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ca1fe40d-770a-49f8-b3cc-d1c43be92be9-run-httpd\") pod \"ceilometer-0\" (UID: \"ca1fe40d-770a-49f8-b3cc-d1c43be92be9\") " pod="openstack/ceilometer-0" Dec 05 11:31:39 crc kubenswrapper[4728]: I1205 11:31:39.203711 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca1fe40d-770a-49f8-b3cc-d1c43be92be9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ca1fe40d-770a-49f8-b3cc-d1c43be92be9\") " pod="openstack/ceilometer-0" Dec 05 11:31:39 crc kubenswrapper[4728]: I1205 11:31:39.203759 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ca1fe40d-770a-49f8-b3cc-d1c43be92be9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ca1fe40d-770a-49f8-b3cc-d1c43be92be9\") " pod="openstack/ceilometer-0" Dec 05 11:31:39 crc kubenswrapper[4728]: I1205 11:31:39.203810 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ca1fe40d-770a-49f8-b3cc-d1c43be92be9-log-httpd\") pod \"ceilometer-0\" (UID: \"ca1fe40d-770a-49f8-b3cc-d1c43be92be9\") " pod="openstack/ceilometer-0" Dec 05 11:31:39 crc kubenswrapper[4728]: I1205 11:31:39.204245 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ca1fe40d-770a-49f8-b3cc-d1c43be92be9-run-httpd\") pod \"ceilometer-0\" (UID: \"ca1fe40d-770a-49f8-b3cc-d1c43be92be9\") " pod="openstack/ceilometer-0" Dec 05 11:31:39 crc kubenswrapper[4728]: I1205 11:31:39.204338 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ca1fe40d-770a-49f8-b3cc-d1c43be92be9-log-httpd\") pod \"ceilometer-0\" (UID: \"ca1fe40d-770a-49f8-b3cc-d1c43be92be9\") " pod="openstack/ceilometer-0" Dec 05 11:31:39 crc kubenswrapper[4728]: I1205 11:31:39.209788 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ca1fe40d-770a-49f8-b3cc-d1c43be92be9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ca1fe40d-770a-49f8-b3cc-d1c43be92be9\") " pod="openstack/ceilometer-0" Dec 05 11:31:39 crc kubenswrapper[4728]: I1205 11:31:39.216609 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ca1fe40d-770a-49f8-b3cc-d1c43be92be9-scripts\") pod \"ceilometer-0\" (UID: \"ca1fe40d-770a-49f8-b3cc-d1c43be92be9\") " pod="openstack/ceilometer-0" Dec 05 11:31:39 crc kubenswrapper[4728]: I1205 11:31:39.217167 4728 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca1fe40d-770a-49f8-b3cc-d1c43be92be9-config-data\") pod \"ceilometer-0\" (UID: \"ca1fe40d-770a-49f8-b3cc-d1c43be92be9\") " pod="openstack/ceilometer-0" Dec 05 11:31:39 crc kubenswrapper[4728]: I1205 11:31:39.219544 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca1fe40d-770a-49f8-b3cc-d1c43be92be9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ca1fe40d-770a-49f8-b3cc-d1c43be92be9\") " pod="openstack/ceilometer-0" Dec 05 11:31:39 crc kubenswrapper[4728]: I1205 11:31:39.235004 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z2c4x\" (UniqueName: \"kubernetes.io/projected/ca1fe40d-770a-49f8-b3cc-d1c43be92be9-kube-api-access-z2c4x\") pod \"ceilometer-0\" (UID: \"ca1fe40d-770a-49f8-b3cc-d1c43be92be9\") " pod="openstack/ceilometer-0" Dec 05 11:31:39 crc kubenswrapper[4728]: I1205 11:31:39.387224 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 11:31:39 crc kubenswrapper[4728]: I1205 11:31:39.694227 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-share-share1-0" Dec 05 11:31:39 crc kubenswrapper[4728]: I1205 11:31:39.769047 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-share-share1-0"] Dec 05 11:31:39 crc kubenswrapper[4728]: I1205 11:31:39.860560 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:31:39 crc kubenswrapper[4728]: I1205 11:31:39.952984 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ca1fe40d-770a-49f8-b3cc-d1c43be92be9","Type":"ContainerStarted","Data":"bf5a7da2210a30cead1562e519c52710c9ee31b2972559f2888c70b0a5f17c66"} Dec 05 11:31:39 crc kubenswrapper[4728]: I1205 11:31:39.953142 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-share-share1-0" podUID="d203e1b2-68bf-458f-b1de-b590da34a559" containerName="manila-share" containerID="cri-o://622698247e7db7a3b73bfd3ef61d99c7db074177bb5fe827a02188c26d467ee7" gracePeriod=30 Dec 05 11:31:39 crc kubenswrapper[4728]: I1205 11:31:39.953824 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-share-share1-0" podUID="d203e1b2-68bf-458f-b1de-b590da34a559" containerName="probe" containerID="cri-o://c3fb85e00c97f4d31dc6a1e05e8ab0b0634263b2d45d3618e3079b9f694464cd" gracePeriod=30 Dec 05 11:31:40 crc kubenswrapper[4728]: I1205 11:31:40.194945 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-9kplx" Dec 05 11:31:40 crc kubenswrapper[4728]: I1205 11:31:40.222881 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j7qqk\" (UniqueName: \"kubernetes.io/projected/520798b6-b938-403f-adc6-5609f6bcfd72-kube-api-access-j7qqk\") pod \"520798b6-b938-403f-adc6-5609f6bcfd72\" (UID: \"520798b6-b938-403f-adc6-5609f6bcfd72\") " Dec 05 11:31:40 crc kubenswrapper[4728]: I1205 11:31:40.222965 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/520798b6-b938-403f-adc6-5609f6bcfd72-scripts\") pod \"520798b6-b938-403f-adc6-5609f6bcfd72\" (UID: \"520798b6-b938-403f-adc6-5609f6bcfd72\") " Dec 05 11:31:40 crc kubenswrapper[4728]: I1205 11:31:40.223074 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/520798b6-b938-403f-adc6-5609f6bcfd72-combined-ca-bundle\") pod \"520798b6-b938-403f-adc6-5609f6bcfd72\" (UID: \"520798b6-b938-403f-adc6-5609f6bcfd72\") " Dec 05 11:31:40 crc kubenswrapper[4728]: I1205 11:31:40.223115 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/520798b6-b938-403f-adc6-5609f6bcfd72-config-data\") pod \"520798b6-b938-403f-adc6-5609f6bcfd72\" (UID: \"520798b6-b938-403f-adc6-5609f6bcfd72\") " Dec 05 11:31:40 crc kubenswrapper[4728]: I1205 11:31:40.246350 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/520798b6-b938-403f-adc6-5609f6bcfd72-scripts" (OuterVolumeSpecName: "scripts") pod "520798b6-b938-403f-adc6-5609f6bcfd72" (UID: "520798b6-b938-403f-adc6-5609f6bcfd72"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:31:40 crc kubenswrapper[4728]: I1205 11:31:40.251953 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/520798b6-b938-403f-adc6-5609f6bcfd72-kube-api-access-j7qqk" (OuterVolumeSpecName: "kube-api-access-j7qqk") pod "520798b6-b938-403f-adc6-5609f6bcfd72" (UID: "520798b6-b938-403f-adc6-5609f6bcfd72"). InnerVolumeSpecName "kube-api-access-j7qqk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:31:40 crc kubenswrapper[4728]: I1205 11:31:40.256699 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/520798b6-b938-403f-adc6-5609f6bcfd72-config-data" (OuterVolumeSpecName: "config-data") pod "520798b6-b938-403f-adc6-5609f6bcfd72" (UID: "520798b6-b938-403f-adc6-5609f6bcfd72"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:31:40 crc kubenswrapper[4728]: I1205 11:31:40.290587 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/520798b6-b938-403f-adc6-5609f6bcfd72-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "520798b6-b938-403f-adc6-5609f6bcfd72" (UID: "520798b6-b938-403f-adc6-5609f6bcfd72"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:31:40 crc kubenswrapper[4728]: I1205 11:31:40.327553 4728 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/520798b6-b938-403f-adc6-5609f6bcfd72-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:40 crc kubenswrapper[4728]: I1205 11:31:40.327597 4728 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/520798b6-b938-403f-adc6-5609f6bcfd72-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:40 crc kubenswrapper[4728]: I1205 11:31:40.327611 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j7qqk\" (UniqueName: \"kubernetes.io/projected/520798b6-b938-403f-adc6-5609f6bcfd72-kube-api-access-j7qqk\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:40 crc kubenswrapper[4728]: I1205 11:31:40.327625 4728 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/520798b6-b938-403f-adc6-5609f6bcfd72-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:40 crc kubenswrapper[4728]: I1205 11:31:40.373933 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="693a6cc7-eba1-4d9d-8c6f-cd61801f7a51" path="/var/lib/kubelet/pods/693a6cc7-eba1-4d9d-8c6f-cd61801f7a51/volumes" Dec 05 11:31:40 crc kubenswrapper[4728]: I1205 11:31:40.755588 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-share-share1-0" Dec 05 11:31:40 crc kubenswrapper[4728]: I1205 11:31:40.835539 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d203e1b2-68bf-458f-b1de-b590da34a559-combined-ca-bundle\") pod \"d203e1b2-68bf-458f-b1de-b590da34a559\" (UID: \"d203e1b2-68bf-458f-b1de-b590da34a559\") " Dec 05 11:31:40 crc kubenswrapper[4728]: I1205 11:31:40.835617 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d203e1b2-68bf-458f-b1de-b590da34a559-etc-machine-id\") pod \"d203e1b2-68bf-458f-b1de-b590da34a559\" (UID: \"d203e1b2-68bf-458f-b1de-b590da34a559\") " Dec 05 11:31:40 crc kubenswrapper[4728]: I1205 11:31:40.835753 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d203e1b2-68bf-458f-b1de-b590da34a559-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "d203e1b2-68bf-458f-b1de-b590da34a559" (UID: "d203e1b2-68bf-458f-b1de-b590da34a559"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:31:40 crc kubenswrapper[4728]: I1205 11:31:40.835821 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d203e1b2-68bf-458f-b1de-b590da34a559-config-data-custom\") pod \"d203e1b2-68bf-458f-b1de-b590da34a559\" (UID: \"d203e1b2-68bf-458f-b1de-b590da34a559\") " Dec 05 11:31:40 crc kubenswrapper[4728]: I1205 11:31:40.835855 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d203e1b2-68bf-458f-b1de-b590da34a559-config-data\") pod \"d203e1b2-68bf-458f-b1de-b590da34a559\" (UID: \"d203e1b2-68bf-458f-b1de-b590da34a559\") " Dec 05 11:31:40 crc kubenswrapper[4728]: I1205 11:31:40.835933 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d203e1b2-68bf-458f-b1de-b590da34a559-scripts\") pod \"d203e1b2-68bf-458f-b1de-b590da34a559\" (UID: \"d203e1b2-68bf-458f-b1de-b590da34a559\") " Dec 05 11:31:40 crc kubenswrapper[4728]: I1205 11:31:40.835969 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-flzfl\" (UniqueName: \"kubernetes.io/projected/d203e1b2-68bf-458f-b1de-b590da34a559-kube-api-access-flzfl\") pod \"d203e1b2-68bf-458f-b1de-b590da34a559\" (UID: \"d203e1b2-68bf-458f-b1de-b590da34a559\") " Dec 05 11:31:40 crc kubenswrapper[4728]: I1205 11:31:40.836002 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/d203e1b2-68bf-458f-b1de-b590da34a559-var-lib-manila\") pod \"d203e1b2-68bf-458f-b1de-b590da34a559\" (UID: \"d203e1b2-68bf-458f-b1de-b590da34a559\") " Dec 05 11:31:40 crc kubenswrapper[4728]: I1205 11:31:40.836076 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/d203e1b2-68bf-458f-b1de-b590da34a559-ceph\") pod \"d203e1b2-68bf-458f-b1de-b590da34a559\" (UID: \"d203e1b2-68bf-458f-b1de-b590da34a559\") " Dec 05 11:31:40 crc kubenswrapper[4728]: I1205 11:31:40.836137 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d203e1b2-68bf-458f-b1de-b590da34a559-var-lib-manila" (OuterVolumeSpecName: "var-lib-manila") pod "d203e1b2-68bf-458f-b1de-b590da34a559" (UID: "d203e1b2-68bf-458f-b1de-b590da34a559"). InnerVolumeSpecName "var-lib-manila". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:31:40 crc kubenswrapper[4728]: I1205 11:31:40.836544 4728 reconciler_common.go:293] "Volume detached for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/d203e1b2-68bf-458f-b1de-b590da34a559-var-lib-manila\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:40 crc kubenswrapper[4728]: I1205 11:31:40.836565 4728 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d203e1b2-68bf-458f-b1de-b590da34a559-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:40 crc kubenswrapper[4728]: I1205 11:31:40.840403 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d203e1b2-68bf-458f-b1de-b590da34a559-ceph" (OuterVolumeSpecName: "ceph") pod "d203e1b2-68bf-458f-b1de-b590da34a559" (UID: "d203e1b2-68bf-458f-b1de-b590da34a559"). InnerVolumeSpecName "ceph". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:31:40 crc kubenswrapper[4728]: I1205 11:31:40.840814 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d203e1b2-68bf-458f-b1de-b590da34a559-kube-api-access-flzfl" (OuterVolumeSpecName: "kube-api-access-flzfl") pod "d203e1b2-68bf-458f-b1de-b590da34a559" (UID: "d203e1b2-68bf-458f-b1de-b590da34a559"). InnerVolumeSpecName "kube-api-access-flzfl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:31:40 crc kubenswrapper[4728]: I1205 11:31:40.842937 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d203e1b2-68bf-458f-b1de-b590da34a559-scripts" (OuterVolumeSpecName: "scripts") pod "d203e1b2-68bf-458f-b1de-b590da34a559" (UID: "d203e1b2-68bf-458f-b1de-b590da34a559"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:31:40 crc kubenswrapper[4728]: I1205 11:31:40.843060 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d203e1b2-68bf-458f-b1de-b590da34a559-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "d203e1b2-68bf-458f-b1de-b590da34a559" (UID: "d203e1b2-68bf-458f-b1de-b590da34a559"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:31:40 crc kubenswrapper[4728]: I1205 11:31:40.885339 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d203e1b2-68bf-458f-b1de-b590da34a559-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d203e1b2-68bf-458f-b1de-b590da34a559" (UID: "d203e1b2-68bf-458f-b1de-b590da34a559"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:31:40 crc kubenswrapper[4728]: I1205 11:31:40.937437 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d203e1b2-68bf-458f-b1de-b590da34a559-config-data" (OuterVolumeSpecName: "config-data") pod "d203e1b2-68bf-458f-b1de-b590da34a559" (UID: "d203e1b2-68bf-458f-b1de-b590da34a559"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:31:40 crc kubenswrapper[4728]: I1205 11:31:40.937621 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d203e1b2-68bf-458f-b1de-b590da34a559-config-data\") pod \"d203e1b2-68bf-458f-b1de-b590da34a559\" (UID: \"d203e1b2-68bf-458f-b1de-b590da34a559\") " Dec 05 11:31:40 crc kubenswrapper[4728]: W1205 11:31:40.937732 4728 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/d203e1b2-68bf-458f-b1de-b590da34a559/volumes/kubernetes.io~secret/config-data Dec 05 11:31:40 crc kubenswrapper[4728]: I1205 11:31:40.937744 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d203e1b2-68bf-458f-b1de-b590da34a559-config-data" (OuterVolumeSpecName: "config-data") pod "d203e1b2-68bf-458f-b1de-b590da34a559" (UID: "d203e1b2-68bf-458f-b1de-b590da34a559"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:31:40 crc kubenswrapper[4728]: I1205 11:31:40.938360 4728 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d203e1b2-68bf-458f-b1de-b590da34a559-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:40 crc kubenswrapper[4728]: I1205 11:31:40.938389 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-flzfl\" (UniqueName: \"kubernetes.io/projected/d203e1b2-68bf-458f-b1de-b590da34a559-kube-api-access-flzfl\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:40 crc kubenswrapper[4728]: I1205 11:31:40.938402 4728 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/d203e1b2-68bf-458f-b1de-b590da34a559-ceph\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:40 crc kubenswrapper[4728]: I1205 11:31:40.938412 4728 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d203e1b2-68bf-458f-b1de-b590da34a559-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:40 crc kubenswrapper[4728]: I1205 11:31:40.938422 4728 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d203e1b2-68bf-458f-b1de-b590da34a559-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:40 crc kubenswrapper[4728]: I1205 11:31:40.938433 4728 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d203e1b2-68bf-458f-b1de-b590da34a559-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:40 crc kubenswrapper[4728]: I1205 11:31:40.972451 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ca1fe40d-770a-49f8-b3cc-d1c43be92be9","Type":"ContainerStarted","Data":"7c72891662fc9050926147f82b35ac9b954630c3f111de8e672ada7d301559b6"} Dec 05 11:31:40 crc kubenswrapper[4728]: I1205 11:31:40.973838 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-9kplx" event={"ID":"520798b6-b938-403f-adc6-5609f6bcfd72","Type":"ContainerDied","Data":"c7d80b84363f0e2ccc4137fb0d45a22cea21f52584f0ac6185f7684bdbde64e7"} Dec 05 11:31:40 crc kubenswrapper[4728]: I1205 11:31:40.973865 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c7d80b84363f0e2ccc4137fb0d45a22cea21f52584f0ac6185f7684bdbde64e7" Dec 05 11:31:40 crc kubenswrapper[4728]: I1205 11:31:40.973859 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-9kplx" Dec 05 11:31:40 crc kubenswrapper[4728]: I1205 11:31:40.978658 4728 generic.go:334] "Generic (PLEG): container finished" podID="d203e1b2-68bf-458f-b1de-b590da34a559" containerID="c3fb85e00c97f4d31dc6a1e05e8ab0b0634263b2d45d3618e3079b9f694464cd" exitCode=0 Dec 05 11:31:40 crc kubenswrapper[4728]: I1205 11:31:40.978684 4728 generic.go:334] "Generic (PLEG): container finished" podID="d203e1b2-68bf-458f-b1de-b590da34a559" containerID="622698247e7db7a3b73bfd3ef61d99c7db074177bb5fe827a02188c26d467ee7" exitCode=1 Dec 05 11:31:40 crc kubenswrapper[4728]: I1205 11:31:40.978701 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"d203e1b2-68bf-458f-b1de-b590da34a559","Type":"ContainerDied","Data":"c3fb85e00c97f4d31dc6a1e05e8ab0b0634263b2d45d3618e3079b9f694464cd"} Dec 05 11:31:40 crc kubenswrapper[4728]: I1205 11:31:40.978722 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"d203e1b2-68bf-458f-b1de-b590da34a559","Type":"ContainerDied","Data":"622698247e7db7a3b73bfd3ef61d99c7db074177bb5fe827a02188c26d467ee7"} Dec 05 11:31:40 crc kubenswrapper[4728]: I1205 11:31:40.978733 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"d203e1b2-68bf-458f-b1de-b590da34a559","Type":"ContainerDied","Data":"ec7428cb059ca4a77ca17a5964602a3b1e036795971cabc8d8267be4dbbd40d6"} Dec 05 11:31:40 crc kubenswrapper[4728]: I1205 11:31:40.978747 4728 scope.go:117] "RemoveContainer" containerID="c3fb85e00c97f4d31dc6a1e05e8ab0b0634263b2d45d3618e3079b9f694464cd" Dec 05 11:31:40 crc kubenswrapper[4728]: I1205 11:31:40.978861 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-share-share1-0" Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.036046 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-share-share1-0"] Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.043092 4728 scope.go:117] "RemoveContainer" containerID="622698247e7db7a3b73bfd3ef61d99c7db074177bb5fe827a02188c26d467ee7" Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.063880 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-share-share1-0"] Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.078502 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-share-share1-0"] Dec 05 11:31:41 crc kubenswrapper[4728]: E1205 11:31:41.079331 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="520798b6-b938-403f-adc6-5609f6bcfd72" containerName="nova-cell0-conductor-db-sync" Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.079349 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="520798b6-b938-403f-adc6-5609f6bcfd72" containerName="nova-cell0-conductor-db-sync" Dec 05 11:31:41 crc kubenswrapper[4728]: E1205 11:31:41.079387 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d203e1b2-68bf-458f-b1de-b590da34a559" containerName="probe" Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.079397 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="d203e1b2-68bf-458f-b1de-b590da34a559" containerName="probe" Dec 05 11:31:41 crc kubenswrapper[4728]: E1205 11:31:41.079429 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d203e1b2-68bf-458f-b1de-b590da34a559" containerName="manila-share" Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.079438 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="d203e1b2-68bf-458f-b1de-b590da34a559" containerName="manila-share" Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.079681 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="d203e1b2-68bf-458f-b1de-b590da34a559" containerName="probe" Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.079729 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="d203e1b2-68bf-458f-b1de-b590da34a559" containerName="manila-share" Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.079756 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="520798b6-b938-403f-adc6-5609f6bcfd72" containerName="nova-cell0-conductor-db-sync" Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.084168 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-share-share1-0" Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.087166 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-share-share1-config-data" Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.100753 4728 scope.go:117] "RemoveContainer" containerID="c3fb85e00c97f4d31dc6a1e05e8ab0b0634263b2d45d3618e3079b9f694464cd" Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.101645 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Dec 05 11:31:41 crc kubenswrapper[4728]: E1205 11:31:41.102509 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c3fb85e00c97f4d31dc6a1e05e8ab0b0634263b2d45d3618e3079b9f694464cd\": container with ID starting with c3fb85e00c97f4d31dc6a1e05e8ab0b0634263b2d45d3618e3079b9f694464cd not found: ID does not exist" containerID="c3fb85e00c97f4d31dc6a1e05e8ab0b0634263b2d45d3618e3079b9f694464cd" Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.102561 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c3fb85e00c97f4d31dc6a1e05e8ab0b0634263b2d45d3618e3079b9f694464cd"} err="failed to get container status \"c3fb85e00c97f4d31dc6a1e05e8ab0b0634263b2d45d3618e3079b9f694464cd\": rpc error: code = NotFound desc = could not find container \"c3fb85e00c97f4d31dc6a1e05e8ab0b0634263b2d45d3618e3079b9f694464cd\": container with ID starting with c3fb85e00c97f4d31dc6a1e05e8ab0b0634263b2d45d3618e3079b9f694464cd not found: ID does not exist" Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.102587 4728 scope.go:117] "RemoveContainer" containerID="622698247e7db7a3b73bfd3ef61d99c7db074177bb5fe827a02188c26d467ee7" Dec 05 11:31:41 crc kubenswrapper[4728]: E1205 11:31:41.105677 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"622698247e7db7a3b73bfd3ef61d99c7db074177bb5fe827a02188c26d467ee7\": container with ID starting with 622698247e7db7a3b73bfd3ef61d99c7db074177bb5fe827a02188c26d467ee7 not found: ID does not exist" containerID="622698247e7db7a3b73bfd3ef61d99c7db074177bb5fe827a02188c26d467ee7" Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.105735 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"622698247e7db7a3b73bfd3ef61d99c7db074177bb5fe827a02188c26d467ee7"} err="failed to get container status \"622698247e7db7a3b73bfd3ef61d99c7db074177bb5fe827a02188c26d467ee7\": rpc error: code = NotFound desc = could not find container \"622698247e7db7a3b73bfd3ef61d99c7db074177bb5fe827a02188c26d467ee7\": container with ID starting with 622698247e7db7a3b73bfd3ef61d99c7db074177bb5fe827a02188c26d467ee7 not found: ID does not exist" Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.105760 4728 scope.go:117] "RemoveContainer" containerID="c3fb85e00c97f4d31dc6a1e05e8ab0b0634263b2d45d3618e3079b9f694464cd" Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.107451 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c3fb85e00c97f4d31dc6a1e05e8ab0b0634263b2d45d3618e3079b9f694464cd"} err="failed to get container status \"c3fb85e00c97f4d31dc6a1e05e8ab0b0634263b2d45d3618e3079b9f694464cd\": rpc error: code = NotFound desc = could not find container \"c3fb85e00c97f4d31dc6a1e05e8ab0b0634263b2d45d3618e3079b9f694464cd\": container with ID starting with 
c3fb85e00c97f4d31dc6a1e05e8ab0b0634263b2d45d3618e3079b9f694464cd not found: ID does not exist" Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.107492 4728 scope.go:117] "RemoveContainer" containerID="622698247e7db7a3b73bfd3ef61d99c7db074177bb5fe827a02188c26d467ee7" Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.109081 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"622698247e7db7a3b73bfd3ef61d99c7db074177bb5fe827a02188c26d467ee7"} err="failed to get container status \"622698247e7db7a3b73bfd3ef61d99c7db074177bb5fe827a02188c26d467ee7\": rpc error: code = NotFound desc = could not find container \"622698247e7db7a3b73bfd3ef61d99c7db074177bb5fe827a02188c26d467ee7\": container with ID starting with 622698247e7db7a3b73bfd3ef61d99c7db074177bb5fe827a02188c26d467ee7 not found: ID does not exist" Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.113450 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.114710 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.120404 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-7xjfv" Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.120932 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.125562 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.146710 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/e76d402c-8c19-4097-8c06-9bb28018f661-ceph\") pod \"manila-share-share1-0\" (UID: \"e76d402c-8c19-4097-8c06-9bb28018f661\") " pod="openstack/manila-share-share1-0" Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.146755 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e76d402c-8c19-4097-8c06-9bb28018f661-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"e76d402c-8c19-4097-8c06-9bb28018f661\") " pod="openstack/manila-share-share1-0" Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.146773 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e76d402c-8c19-4097-8c06-9bb28018f661-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"e76d402c-8c19-4097-8c06-9bb28018f661\") " pod="openstack/manila-share-share1-0" Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.146840 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/e76d402c-8c19-4097-8c06-9bb28018f661-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"e76d402c-8c19-4097-8c06-9bb28018f661\") " pod="openstack/manila-share-share1-0" Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.146865 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e76d402c-8c19-4097-8c06-9bb28018f661-combined-ca-bundle\") pod 
\"manila-share-share1-0\" (UID: \"e76d402c-8c19-4097-8c06-9bb28018f661\") " pod="openstack/manila-share-share1-0" Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.146910 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e76d402c-8c19-4097-8c06-9bb28018f661-scripts\") pod \"manila-share-share1-0\" (UID: \"e76d402c-8c19-4097-8c06-9bb28018f661\") " pod="openstack/manila-share-share1-0" Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.146935 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e76d402c-8c19-4097-8c06-9bb28018f661-config-data\") pod \"manila-share-share1-0\" (UID: \"e76d402c-8c19-4097-8c06-9bb28018f661\") " pod="openstack/manila-share-share1-0" Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.146999 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ntct6\" (UniqueName: \"kubernetes.io/projected/e76d402c-8c19-4097-8c06-9bb28018f661-kube-api-access-ntct6\") pod \"manila-share-share1-0\" (UID: \"e76d402c-8c19-4097-8c06-9bb28018f661\") " pod="openstack/manila-share-share1-0" Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.147025 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7802cd18-c771-414b-afd5-f6d47c588a58-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"7802cd18-c771-414b-afd5-f6d47c588a58\") " pod="openstack/nova-cell0-conductor-0" Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.147075 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tw67g\" (UniqueName: \"kubernetes.io/projected/7802cd18-c771-414b-afd5-f6d47c588a58-kube-api-access-tw67g\") pod \"nova-cell0-conductor-0\" (UID: \"7802cd18-c771-414b-afd5-f6d47c588a58\") " pod="openstack/nova-cell0-conductor-0" Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.147098 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7802cd18-c771-414b-afd5-f6d47c588a58-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"7802cd18-c771-414b-afd5-f6d47c588a58\") " pod="openstack/nova-cell0-conductor-0" Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.249931 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e76d402c-8c19-4097-8c06-9bb28018f661-scripts\") pod \"manila-share-share1-0\" (UID: \"e76d402c-8c19-4097-8c06-9bb28018f661\") " pod="openstack/manila-share-share1-0" Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.249980 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e76d402c-8c19-4097-8c06-9bb28018f661-config-data\") pod \"manila-share-share1-0\" (UID: \"e76d402c-8c19-4097-8c06-9bb28018f661\") " pod="openstack/manila-share-share1-0" Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.250041 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ntct6\" (UniqueName: \"kubernetes.io/projected/e76d402c-8c19-4097-8c06-9bb28018f661-kube-api-access-ntct6\") pod \"manila-share-share1-0\" (UID: \"e76d402c-8c19-4097-8c06-9bb28018f661\") 
" pod="openstack/manila-share-share1-0" Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.250073 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7802cd18-c771-414b-afd5-f6d47c588a58-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"7802cd18-c771-414b-afd5-f6d47c588a58\") " pod="openstack/nova-cell0-conductor-0" Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.250123 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tw67g\" (UniqueName: \"kubernetes.io/projected/7802cd18-c771-414b-afd5-f6d47c588a58-kube-api-access-tw67g\") pod \"nova-cell0-conductor-0\" (UID: \"7802cd18-c771-414b-afd5-f6d47c588a58\") " pod="openstack/nova-cell0-conductor-0" Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.250147 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7802cd18-c771-414b-afd5-f6d47c588a58-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"7802cd18-c771-414b-afd5-f6d47c588a58\") " pod="openstack/nova-cell0-conductor-0" Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.250256 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/e76d402c-8c19-4097-8c06-9bb28018f661-ceph\") pod \"manila-share-share1-0\" (UID: \"e76d402c-8c19-4097-8c06-9bb28018f661\") " pod="openstack/manila-share-share1-0" Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.250288 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e76d402c-8c19-4097-8c06-9bb28018f661-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"e76d402c-8c19-4097-8c06-9bb28018f661\") " pod="openstack/manila-share-share1-0" Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.250313 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e76d402c-8c19-4097-8c06-9bb28018f661-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"e76d402c-8c19-4097-8c06-9bb28018f661\") " pod="openstack/manila-share-share1-0" Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.250362 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/e76d402c-8c19-4097-8c06-9bb28018f661-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"e76d402c-8c19-4097-8c06-9bb28018f661\") " pod="openstack/manila-share-share1-0" Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.250383 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e76d402c-8c19-4097-8c06-9bb28018f661-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"e76d402c-8c19-4097-8c06-9bb28018f661\") " pod="openstack/manila-share-share1-0" Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.250654 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e76d402c-8c19-4097-8c06-9bb28018f661-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"e76d402c-8c19-4097-8c06-9bb28018f661\") " pod="openstack/manila-share-share1-0" Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.251128 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/e76d402c-8c19-4097-8c06-9bb28018f661-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"e76d402c-8c19-4097-8c06-9bb28018f661\") " pod="openstack/manila-share-share1-0" Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.254427 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7802cd18-c771-414b-afd5-f6d47c588a58-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"7802cd18-c771-414b-afd5-f6d47c588a58\") " pod="openstack/nova-cell0-conductor-0" Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.254441 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e76d402c-8c19-4097-8c06-9bb28018f661-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"e76d402c-8c19-4097-8c06-9bb28018f661\") " pod="openstack/manila-share-share1-0" Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.254450 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7802cd18-c771-414b-afd5-f6d47c588a58-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"7802cd18-c771-414b-afd5-f6d47c588a58\") " pod="openstack/nova-cell0-conductor-0" Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.254636 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e76d402c-8c19-4097-8c06-9bb28018f661-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"e76d402c-8c19-4097-8c06-9bb28018f661\") " pod="openstack/manila-share-share1-0" Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.255804 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e76d402c-8c19-4097-8c06-9bb28018f661-scripts\") pod \"manila-share-share1-0\" (UID: \"e76d402c-8c19-4097-8c06-9bb28018f661\") " pod="openstack/manila-share-share1-0" Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.256042 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/e76d402c-8c19-4097-8c06-9bb28018f661-ceph\") pod \"manila-share-share1-0\" (UID: \"e76d402c-8c19-4097-8c06-9bb28018f661\") " pod="openstack/manila-share-share1-0" Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.260381 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e76d402c-8c19-4097-8c06-9bb28018f661-config-data\") pod \"manila-share-share1-0\" (UID: \"e76d402c-8c19-4097-8c06-9bb28018f661\") " pod="openstack/manila-share-share1-0" Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.273420 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ntct6\" (UniqueName: \"kubernetes.io/projected/e76d402c-8c19-4097-8c06-9bb28018f661-kube-api-access-ntct6\") pod \"manila-share-share1-0\" (UID: \"e76d402c-8c19-4097-8c06-9bb28018f661\") " pod="openstack/manila-share-share1-0" Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.273887 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tw67g\" (UniqueName: \"kubernetes.io/projected/7802cd18-c771-414b-afd5-f6d47c588a58-kube-api-access-tw67g\") pod \"nova-cell0-conductor-0\" (UID: \"7802cd18-c771-414b-afd5-f6d47c588a58\") " pod="openstack/nova-cell0-conductor-0" Dec 05 11:31:41 crc 
kubenswrapper[4728]: I1205 11:31:41.420648 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-share-share1-0" Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.456480 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.998655 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ca1fe40d-770a-49f8-b3cc-d1c43be92be9","Type":"ContainerStarted","Data":"df29a0aa67af47af483c828d03718f7e34781475b33abaf7e414afa3eed69446"} Dec 05 11:31:41 crc kubenswrapper[4728]: I1205 11:31:41.999259 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ca1fe40d-770a-49f8-b3cc-d1c43be92be9","Type":"ContainerStarted","Data":"369b7d33887016d14af4e80b9e8f2950e223051e25212a2dc45bedae9c3ab71d"} Dec 05 11:31:42 crc kubenswrapper[4728]: I1205 11:31:42.013479 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Dec 05 11:31:42 crc kubenswrapper[4728]: I1205 11:31:42.046537 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 11:31:42 crc kubenswrapper[4728]: I1205 11:31:42.364170 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d203e1b2-68bf-458f-b1de-b590da34a559" path="/var/lib/kubelet/pods/d203e1b2-68bf-458f-b1de-b590da34a559/volumes" Dec 05 11:31:43 crc kubenswrapper[4728]: I1205 11:31:43.010875 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"7802cd18-c771-414b-afd5-f6d47c588a58","Type":"ContainerStarted","Data":"3d2a4e7552103919e0d3559ee17c22efa8db8f98f63525f23b8886c0b83ba830"} Dec 05 11:31:43 crc kubenswrapper[4728]: I1205 11:31:43.011453 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"7802cd18-c771-414b-afd5-f6d47c588a58","Type":"ContainerStarted","Data":"fea03c9d4a138882965ab66aec92e6898c2b286651bdbecc1fa05a660e502408"} Dec 05 11:31:43 crc kubenswrapper[4728]: I1205 11:31:43.011651 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Dec 05 11:31:43 crc kubenswrapper[4728]: I1205 11:31:43.012984 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"e76d402c-8c19-4097-8c06-9bb28018f661","Type":"ContainerStarted","Data":"449c377d1647fdc48617a325cea7d970f440af3a97ad81b5fa52a20589581d77"} Dec 05 11:31:43 crc kubenswrapper[4728]: I1205 11:31:43.013019 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"e76d402c-8c19-4097-8c06-9bb28018f661","Type":"ContainerStarted","Data":"3a09fdfab879ec99d736ad7cae76c147e807613c808532476c0ab012ebb665f8"} Dec 05 11:31:43 crc kubenswrapper[4728]: I1205 11:31:43.036172 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.036150337 podStartE2EDuration="2.036150337s" podCreationTimestamp="2025-12-05 11:31:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:31:43.026615306 +0000 UTC m=+1437.168738009" watchObservedRunningTime="2025-12-05 11:31:43.036150337 +0000 UTC m=+1437.178273030" Dec 05 11:31:44 crc kubenswrapper[4728]: I1205 11:31:44.022740 4728 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"e76d402c-8c19-4097-8c06-9bb28018f661","Type":"ContainerStarted","Data":"c7b77d2ee393651a3c837f1719c379142a78bcce87ed872844dab0142337b417"} Dec 05 11:31:44 crc kubenswrapper[4728]: I1205 11:31:44.025421 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ca1fe40d-770a-49f8-b3cc-d1c43be92be9","Type":"ContainerStarted","Data":"62b6484d4af7c71e54757b9a0a46e0a2b5397d82a636040ef2997e01a765ded5"} Dec 05 11:31:44 crc kubenswrapper[4728]: I1205 11:31:44.025559 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 11:31:44 crc kubenswrapper[4728]: I1205 11:31:44.055891 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-share-share1-0" podStartSLOduration=3.055864728 podStartE2EDuration="3.055864728s" podCreationTimestamp="2025-12-05 11:31:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:31:44.04132144 +0000 UTC m=+1438.183444133" watchObservedRunningTime="2025-12-05 11:31:44.055864728 +0000 UTC m=+1438.197987461" Dec 05 11:31:44 crc kubenswrapper[4728]: I1205 11:31:44.063615 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.098896654 podStartE2EDuration="5.063600429s" podCreationTimestamp="2025-12-05 11:31:39 +0000 UTC" firstStartedPulling="2025-12-05 11:31:39.864647651 +0000 UTC m=+1434.006770344" lastFinishedPulling="2025-12-05 11:31:42.829351416 +0000 UTC m=+1436.971474119" observedRunningTime="2025-12-05 11:31:44.062692354 +0000 UTC m=+1438.204815057" watchObservedRunningTime="2025-12-05 11:31:44.063600429 +0000 UTC m=+1438.205723122" Dec 05 11:31:51 crc kubenswrapper[4728]: I1205 11:31:51.421783 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-share-share1-0" Dec 05 11:31:51 crc kubenswrapper[4728]: I1205 11:31:51.501727 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Dec 05 11:31:51 crc kubenswrapper[4728]: I1205 11:31:51.999654 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-6x7gh"] Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.001376 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-6x7gh" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.002963 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.003256 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.009041 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-6x7gh"] Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.109355 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-6x7gh\" (UID: \"bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e\") " pod="openstack/nova-cell0-cell-mapping-6x7gh" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.109438 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e-config-data\") pod \"nova-cell0-cell-mapping-6x7gh\" (UID: \"bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e\") " pod="openstack/nova-cell0-cell-mapping-6x7gh" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.109512 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e-scripts\") pod \"nova-cell0-cell-mapping-6x7gh\" (UID: \"bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e\") " pod="openstack/nova-cell0-cell-mapping-6x7gh" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.109579 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rr22h\" (UniqueName: \"kubernetes.io/projected/bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e-kube-api-access-rr22h\") pod \"nova-cell0-cell-mapping-6x7gh\" (UID: \"bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e\") " pod="openstack/nova-cell0-cell-mapping-6x7gh" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.166562 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.168768 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.170935 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.183097 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.211162 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rr22h\" (UniqueName: \"kubernetes.io/projected/bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e-kube-api-access-rr22h\") pod \"nova-cell0-cell-mapping-6x7gh\" (UID: \"bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e\") " pod="openstack/nova-cell0-cell-mapping-6x7gh" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.211252 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-6x7gh\" (UID: \"bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e\") " pod="openstack/nova-cell0-cell-mapping-6x7gh" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.211316 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e-config-data\") pod \"nova-cell0-cell-mapping-6x7gh\" (UID: \"bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e\") " pod="openstack/nova-cell0-cell-mapping-6x7gh" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.211394 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e-scripts\") pod \"nova-cell0-cell-mapping-6x7gh\" (UID: \"bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e\") " pod="openstack/nova-cell0-cell-mapping-6x7gh" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.217070 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-6x7gh\" (UID: \"bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e\") " pod="openstack/nova-cell0-cell-mapping-6x7gh" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.217217 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e-scripts\") pod \"nova-cell0-cell-mapping-6x7gh\" (UID: \"bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e\") " pod="openstack/nova-cell0-cell-mapping-6x7gh" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.218076 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e-config-data\") pod \"nova-cell0-cell-mapping-6x7gh\" (UID: \"bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e\") " pod="openstack/nova-cell0-cell-mapping-6x7gh" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.302660 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rr22h\" (UniqueName: \"kubernetes.io/projected/bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e-kube-api-access-rr22h\") pod \"nova-cell0-cell-mapping-6x7gh\" (UID: \"bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e\") " pod="openstack/nova-cell0-cell-mapping-6x7gh" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.315274 4728 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76948b41-b61f-4f2b-af11-eff84f150dab-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"76948b41-b61f-4f2b-af11-eff84f150dab\") " pod="openstack/nova-api-0" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.315353 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jhpvh\" (UniqueName: \"kubernetes.io/projected/76948b41-b61f-4f2b-af11-eff84f150dab-kube-api-access-jhpvh\") pod \"nova-api-0\" (UID: \"76948b41-b61f-4f2b-af11-eff84f150dab\") " pod="openstack/nova-api-0" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.315420 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/76948b41-b61f-4f2b-af11-eff84f150dab-logs\") pod \"nova-api-0\" (UID: \"76948b41-b61f-4f2b-af11-eff84f150dab\") " pod="openstack/nova-api-0" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.315573 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76948b41-b61f-4f2b-af11-eff84f150dab-config-data\") pod \"nova-api-0\" (UID: \"76948b41-b61f-4f2b-af11-eff84f150dab\") " pod="openstack/nova-api-0" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.324302 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-6x7gh" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.410504 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.412377 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.428849 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/76948b41-b61f-4f2b-af11-eff84f150dab-logs\") pod \"nova-api-0\" (UID: \"76948b41-b61f-4f2b-af11-eff84f150dab\") " pod="openstack/nova-api-0" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.430367 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76948b41-b61f-4f2b-af11-eff84f150dab-config-data\") pod \"nova-api-0\" (UID: \"76948b41-b61f-4f2b-af11-eff84f150dab\") " pod="openstack/nova-api-0" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.430449 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76948b41-b61f-4f2b-af11-eff84f150dab-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"76948b41-b61f-4f2b-af11-eff84f150dab\") " pod="openstack/nova-api-0" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.430562 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jhpvh\" (UniqueName: \"kubernetes.io/projected/76948b41-b61f-4f2b-af11-eff84f150dab-kube-api-access-jhpvh\") pod \"nova-api-0\" (UID: \"76948b41-b61f-4f2b-af11-eff84f150dab\") " pod="openstack/nova-api-0" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.432926 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/76948b41-b61f-4f2b-af11-eff84f150dab-logs\") pod \"nova-api-0\" (UID: \"76948b41-b61f-4f2b-af11-eff84f150dab\") " pod="openstack/nova-api-0" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.441785 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76948b41-b61f-4f2b-af11-eff84f150dab-config-data\") pod \"nova-api-0\" (UID: \"76948b41-b61f-4f2b-af11-eff84f150dab\") " pod="openstack/nova-api-0" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.444401 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76948b41-b61f-4f2b-af11-eff84f150dab-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"76948b41-b61f-4f2b-af11-eff84f150dab\") " pod="openstack/nova-api-0" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.510654 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.532554 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-798rb\" (UniqueName: \"kubernetes.io/projected/f4f746e6-fdda-4860-830d-34b37eff58a2-kube-api-access-798rb\") pod \"nova-metadata-0\" (UID: \"f4f746e6-fdda-4860-830d-34b37eff58a2\") " pod="openstack/nova-metadata-0" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.532631 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f4f746e6-fdda-4860-830d-34b37eff58a2-config-data\") pod \"nova-metadata-0\" (UID: \"f4f746e6-fdda-4860-830d-34b37eff58a2\") " pod="openstack/nova-metadata-0" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.532761 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4f746e6-fdda-4860-830d-34b37eff58a2-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"f4f746e6-fdda-4860-830d-34b37eff58a2\") " pod="openstack/nova-metadata-0" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.533136 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f4f746e6-fdda-4860-830d-34b37eff58a2-logs\") pod \"nova-metadata-0\" (UID: \"f4f746e6-fdda-4860-830d-34b37eff58a2\") " pod="openstack/nova-metadata-0" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.568632 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.576867 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.578449 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.578526 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jhpvh\" (UniqueName: \"kubernetes.io/projected/76948b41-b61f-4f2b-af11-eff84f150dab-kube-api-access-jhpvh\") pod \"nova-api-0\" (UID: \"76948b41-b61f-4f2b-af11-eff84f150dab\") " pod="openstack/nova-api-0" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.584302 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.648932 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.652315 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f4f746e6-fdda-4860-830d-34b37eff58a2-config-data\") pod \"nova-metadata-0\" (UID: \"f4f746e6-fdda-4860-830d-34b37eff58a2\") " pod="openstack/nova-metadata-0" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.652387 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4f746e6-fdda-4860-830d-34b37eff58a2-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"f4f746e6-fdda-4860-830d-34b37eff58a2\") " pod="openstack/nova-metadata-0" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.652519 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f4f746e6-fdda-4860-830d-34b37eff58a2-logs\") pod \"nova-metadata-0\" (UID: \"f4f746e6-fdda-4860-830d-34b37eff58a2\") " pod="openstack/nova-metadata-0" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.652647 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-798rb\" (UniqueName: \"kubernetes.io/projected/f4f746e6-fdda-4860-830d-34b37eff58a2-kube-api-access-798rb\") pod \"nova-metadata-0\" (UID: \"f4f746e6-fdda-4860-830d-34b37eff58a2\") " pod="openstack/nova-metadata-0" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.654235 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f4f746e6-fdda-4860-830d-34b37eff58a2-logs\") pod \"nova-metadata-0\" (UID: \"f4f746e6-fdda-4860-830d-34b37eff58a2\") " 
pod="openstack/nova-metadata-0" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.674688 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4f746e6-fdda-4860-830d-34b37eff58a2-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"f4f746e6-fdda-4860-830d-34b37eff58a2\") " pod="openstack/nova-metadata-0" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.689442 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.698048 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f4f746e6-fdda-4860-830d-34b37eff58a2-config-data\") pod \"nova-metadata-0\" (UID: \"f4f746e6-fdda-4860-830d-34b37eff58a2\") " pod="openstack/nova-metadata-0" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.710202 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-798rb\" (UniqueName: \"kubernetes.io/projected/f4f746e6-fdda-4860-830d-34b37eff58a2-kube-api-access-798rb\") pod \"nova-metadata-0\" (UID: \"f4f746e6-fdda-4860-830d-34b37eff58a2\") " pod="openstack/nova-metadata-0" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.782715 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.785210 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.793559 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9rvcq\" (UniqueName: \"kubernetes.io/projected/bac9d7a3-475b-4107-9a25-45fe4f9756cb-kube-api-access-9rvcq\") pod \"nova-cell1-novncproxy-0\" (UID: \"bac9d7a3-475b-4107-9a25-45fe4f9756cb\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.793770 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bac9d7a3-475b-4107-9a25-45fe4f9756cb-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"bac9d7a3-475b-4107-9a25-45fe4f9756cb\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.794995 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bac9d7a3-475b-4107-9a25-45fe4f9756cb-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"bac9d7a3-475b-4107-9a25-45fe4f9756cb\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.798206 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.798621 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.812306 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6b6c754dc9-czxc8"] Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.814751 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6b6c754dc9-czxc8" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.815253 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.847088 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6b6c754dc9-czxc8"] Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.898079 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vqqgv\" (UniqueName: \"kubernetes.io/projected/7f559b48-333f-446d-bab9-177aa33286c2-kube-api-access-vqqgv\") pod \"nova-scheduler-0\" (UID: \"7f559b48-333f-446d-bab9-177aa33286c2\") " pod="openstack/nova-scheduler-0" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.898274 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f559b48-333f-446d-bab9-177aa33286c2-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"7f559b48-333f-446d-bab9-177aa33286c2\") " pod="openstack/nova-scheduler-0" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.898347 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dfa85807-524b-4fa7-9cf4-b05a8b659c71-ovsdbserver-nb\") pod \"dnsmasq-dns-6b6c754dc9-czxc8\" (UID: \"dfa85807-524b-4fa7-9cf4-b05a8b659c71\") " pod="openstack/dnsmasq-dns-6b6c754dc9-czxc8" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.898391 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/dfa85807-524b-4fa7-9cf4-b05a8b659c71-dns-swift-storage-0\") pod \"dnsmasq-dns-6b6c754dc9-czxc8\" (UID: \"dfa85807-524b-4fa7-9cf4-b05a8b659c71\") " pod="openstack/dnsmasq-dns-6b6c754dc9-czxc8" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.899896 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dfa85807-524b-4fa7-9cf4-b05a8b659c71-dns-svc\") pod \"dnsmasq-dns-6b6c754dc9-czxc8\" (UID: \"dfa85807-524b-4fa7-9cf4-b05a8b659c71\") " pod="openstack/dnsmasq-dns-6b6c754dc9-czxc8" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.900015 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9rvcq\" (UniqueName: \"kubernetes.io/projected/bac9d7a3-475b-4107-9a25-45fe4f9756cb-kube-api-access-9rvcq\") pod \"nova-cell1-novncproxy-0\" (UID: \"bac9d7a3-475b-4107-9a25-45fe4f9756cb\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.900097 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b84xr\" (UniqueName: \"kubernetes.io/projected/dfa85807-524b-4fa7-9cf4-b05a8b659c71-kube-api-access-b84xr\") pod \"dnsmasq-dns-6b6c754dc9-czxc8\" (UID: \"dfa85807-524b-4fa7-9cf4-b05a8b659c71\") " pod="openstack/dnsmasq-dns-6b6c754dc9-czxc8" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.900167 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bac9d7a3-475b-4107-9a25-45fe4f9756cb-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"bac9d7a3-475b-4107-9a25-45fe4f9756cb\") 
" pod="openstack/nova-cell1-novncproxy-0" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.900304 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dfa85807-524b-4fa7-9cf4-b05a8b659c71-ovsdbserver-sb\") pod \"dnsmasq-dns-6b6c754dc9-czxc8\" (UID: \"dfa85807-524b-4fa7-9cf4-b05a8b659c71\") " pod="openstack/dnsmasq-dns-6b6c754dc9-czxc8" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.900334 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bac9d7a3-475b-4107-9a25-45fe4f9756cb-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"bac9d7a3-475b-4107-9a25-45fe4f9756cb\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.900354 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dfa85807-524b-4fa7-9cf4-b05a8b659c71-config\") pod \"dnsmasq-dns-6b6c754dc9-czxc8\" (UID: \"dfa85807-524b-4fa7-9cf4-b05a8b659c71\") " pod="openstack/dnsmasq-dns-6b6c754dc9-czxc8" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.900378 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f559b48-333f-446d-bab9-177aa33286c2-config-data\") pod \"nova-scheduler-0\" (UID: \"7f559b48-333f-446d-bab9-177aa33286c2\") " pod="openstack/nova-scheduler-0" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.910571 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bac9d7a3-475b-4107-9a25-45fe4f9756cb-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"bac9d7a3-475b-4107-9a25-45fe4f9756cb\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.914305 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bac9d7a3-475b-4107-9a25-45fe4f9756cb-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"bac9d7a3-475b-4107-9a25-45fe4f9756cb\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 11:31:52 crc kubenswrapper[4728]: I1205 11:31:52.922082 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9rvcq\" (UniqueName: \"kubernetes.io/projected/bac9d7a3-475b-4107-9a25-45fe4f9756cb-kube-api-access-9rvcq\") pod \"nova-cell1-novncproxy-0\" (UID: \"bac9d7a3-475b-4107-9a25-45fe4f9756cb\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 11:31:53 crc kubenswrapper[4728]: I1205 11:31:53.002408 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dfa85807-524b-4fa7-9cf4-b05a8b659c71-ovsdbserver-sb\") pod \"dnsmasq-dns-6b6c754dc9-czxc8\" (UID: \"dfa85807-524b-4fa7-9cf4-b05a8b659c71\") " pod="openstack/dnsmasq-dns-6b6c754dc9-czxc8" Dec 05 11:31:53 crc kubenswrapper[4728]: I1205 11:31:53.003708 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dfa85807-524b-4fa7-9cf4-b05a8b659c71-config\") pod \"dnsmasq-dns-6b6c754dc9-czxc8\" (UID: \"dfa85807-524b-4fa7-9cf4-b05a8b659c71\") " pod="openstack/dnsmasq-dns-6b6c754dc9-czxc8" Dec 05 11:31:53 crc kubenswrapper[4728]: I1205 11:31:53.003729 4728 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f559b48-333f-446d-bab9-177aa33286c2-config-data\") pod \"nova-scheduler-0\" (UID: \"7f559b48-333f-446d-bab9-177aa33286c2\") " pod="openstack/nova-scheduler-0" Dec 05 11:31:53 crc kubenswrapper[4728]: I1205 11:31:53.003783 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vqqgv\" (UniqueName: \"kubernetes.io/projected/7f559b48-333f-446d-bab9-177aa33286c2-kube-api-access-vqqgv\") pod \"nova-scheduler-0\" (UID: \"7f559b48-333f-446d-bab9-177aa33286c2\") " pod="openstack/nova-scheduler-0" Dec 05 11:31:53 crc kubenswrapper[4728]: I1205 11:31:53.003846 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f559b48-333f-446d-bab9-177aa33286c2-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"7f559b48-333f-446d-bab9-177aa33286c2\") " pod="openstack/nova-scheduler-0" Dec 05 11:31:53 crc kubenswrapper[4728]: I1205 11:31:53.003876 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dfa85807-524b-4fa7-9cf4-b05a8b659c71-ovsdbserver-nb\") pod \"dnsmasq-dns-6b6c754dc9-czxc8\" (UID: \"dfa85807-524b-4fa7-9cf4-b05a8b659c71\") " pod="openstack/dnsmasq-dns-6b6c754dc9-czxc8" Dec 05 11:31:53 crc kubenswrapper[4728]: I1205 11:31:53.003909 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/dfa85807-524b-4fa7-9cf4-b05a8b659c71-dns-swift-storage-0\") pod \"dnsmasq-dns-6b6c754dc9-czxc8\" (UID: \"dfa85807-524b-4fa7-9cf4-b05a8b659c71\") " pod="openstack/dnsmasq-dns-6b6c754dc9-czxc8" Dec 05 11:31:53 crc kubenswrapper[4728]: I1205 11:31:53.003945 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dfa85807-524b-4fa7-9cf4-b05a8b659c71-dns-svc\") pod \"dnsmasq-dns-6b6c754dc9-czxc8\" (UID: \"dfa85807-524b-4fa7-9cf4-b05a8b659c71\") " pod="openstack/dnsmasq-dns-6b6c754dc9-czxc8" Dec 05 11:31:53 crc kubenswrapper[4728]: I1205 11:31:53.003976 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b84xr\" (UniqueName: \"kubernetes.io/projected/dfa85807-524b-4fa7-9cf4-b05a8b659c71-kube-api-access-b84xr\") pod \"dnsmasq-dns-6b6c754dc9-czxc8\" (UID: \"dfa85807-524b-4fa7-9cf4-b05a8b659c71\") " pod="openstack/dnsmasq-dns-6b6c754dc9-czxc8" Dec 05 11:31:53 crc kubenswrapper[4728]: I1205 11:31:53.003662 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dfa85807-524b-4fa7-9cf4-b05a8b659c71-ovsdbserver-sb\") pod \"dnsmasq-dns-6b6c754dc9-czxc8\" (UID: \"dfa85807-524b-4fa7-9cf4-b05a8b659c71\") " pod="openstack/dnsmasq-dns-6b6c754dc9-czxc8" Dec 05 11:31:53 crc kubenswrapper[4728]: I1205 11:31:53.005551 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/dfa85807-524b-4fa7-9cf4-b05a8b659c71-dns-swift-storage-0\") pod \"dnsmasq-dns-6b6c754dc9-czxc8\" (UID: \"dfa85807-524b-4fa7-9cf4-b05a8b659c71\") " pod="openstack/dnsmasq-dns-6b6c754dc9-czxc8" Dec 05 11:31:53 crc kubenswrapper[4728]: I1205 11:31:53.005896 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/dfa85807-524b-4fa7-9cf4-b05a8b659c71-config\") pod \"dnsmasq-dns-6b6c754dc9-czxc8\" (UID: \"dfa85807-524b-4fa7-9cf4-b05a8b659c71\") " pod="openstack/dnsmasq-dns-6b6c754dc9-czxc8" Dec 05 11:31:53 crc kubenswrapper[4728]: I1205 11:31:53.006040 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dfa85807-524b-4fa7-9cf4-b05a8b659c71-dns-svc\") pod \"dnsmasq-dns-6b6c754dc9-czxc8\" (UID: \"dfa85807-524b-4fa7-9cf4-b05a8b659c71\") " pod="openstack/dnsmasq-dns-6b6c754dc9-czxc8" Dec 05 11:31:53 crc kubenswrapper[4728]: I1205 11:31:53.006398 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dfa85807-524b-4fa7-9cf4-b05a8b659c71-ovsdbserver-nb\") pod \"dnsmasq-dns-6b6c754dc9-czxc8\" (UID: \"dfa85807-524b-4fa7-9cf4-b05a8b659c71\") " pod="openstack/dnsmasq-dns-6b6c754dc9-czxc8" Dec 05 11:31:53 crc kubenswrapper[4728]: I1205 11:31:53.009649 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f559b48-333f-446d-bab9-177aa33286c2-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"7f559b48-333f-446d-bab9-177aa33286c2\") " pod="openstack/nova-scheduler-0" Dec 05 11:31:53 crc kubenswrapper[4728]: I1205 11:31:53.010912 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f559b48-333f-446d-bab9-177aa33286c2-config-data\") pod \"nova-scheduler-0\" (UID: \"7f559b48-333f-446d-bab9-177aa33286c2\") " pod="openstack/nova-scheduler-0" Dec 05 11:31:53 crc kubenswrapper[4728]: I1205 11:31:53.029253 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b84xr\" (UniqueName: \"kubernetes.io/projected/dfa85807-524b-4fa7-9cf4-b05a8b659c71-kube-api-access-b84xr\") pod \"dnsmasq-dns-6b6c754dc9-czxc8\" (UID: \"dfa85807-524b-4fa7-9cf4-b05a8b659c71\") " pod="openstack/dnsmasq-dns-6b6c754dc9-czxc8" Dec 05 11:31:53 crc kubenswrapper[4728]: I1205 11:31:53.032566 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vqqgv\" (UniqueName: \"kubernetes.io/projected/7f559b48-333f-446d-bab9-177aa33286c2-kube-api-access-vqqgv\") pod \"nova-scheduler-0\" (UID: \"7f559b48-333f-446d-bab9-177aa33286c2\") " pod="openstack/nova-scheduler-0" Dec 05 11:31:53 crc kubenswrapper[4728]: I1205 11:31:53.119785 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-6x7gh"] Dec 05 11:31:53 crc kubenswrapper[4728]: I1205 11:31:53.119876 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 11:31:53 crc kubenswrapper[4728]: W1205 11:31:53.122584 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbdffaa6a_7be2_40fa_915d_f36ff3a2ad9e.slice/crio-a553d71a8926aedbaba0c389d168558a6020d430d67048a476bac541f88995c0 WatchSource:0}: Error finding container a553d71a8926aedbaba0c389d168558a6020d430d67048a476bac541f88995c0: Status 404 returned error can't find the container with id a553d71a8926aedbaba0c389d168558a6020d430d67048a476bac541f88995c0 Dec 05 11:31:53 crc kubenswrapper[4728]: I1205 11:31:53.140446 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6b6c754dc9-czxc8" Dec 05 11:31:53 crc kubenswrapper[4728]: I1205 11:31:53.182663 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-6x7gh" event={"ID":"bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e","Type":"ContainerStarted","Data":"a553d71a8926aedbaba0c389d168558a6020d430d67048a476bac541f88995c0"} Dec 05 11:31:53 crc kubenswrapper[4728]: I1205 11:31:53.206773 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 11:31:53 crc kubenswrapper[4728]: I1205 11:31:53.226965 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-k7g8m"] Dec 05 11:31:53 crc kubenswrapper[4728]: I1205 11:31:53.228327 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-k7g8m" Dec 05 11:31:53 crc kubenswrapper[4728]: I1205 11:31:53.234641 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 05 11:31:53 crc kubenswrapper[4728]: I1205 11:31:53.234839 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Dec 05 11:31:53 crc kubenswrapper[4728]: I1205 11:31:53.235822 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-k7g8m"] Dec 05 11:31:53 crc kubenswrapper[4728]: I1205 11:31:53.245675 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 11:31:53 crc kubenswrapper[4728]: I1205 11:31:53.315295 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3a537dee-6a60-42e4-a928-c43d4db07171-scripts\") pod \"nova-cell1-conductor-db-sync-k7g8m\" (UID: \"3a537dee-6a60-42e4-a928-c43d4db07171\") " pod="openstack/nova-cell1-conductor-db-sync-k7g8m" Dec 05 11:31:53 crc kubenswrapper[4728]: I1205 11:31:53.315554 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2dr9z\" (UniqueName: \"kubernetes.io/projected/3a537dee-6a60-42e4-a928-c43d4db07171-kube-api-access-2dr9z\") pod \"nova-cell1-conductor-db-sync-k7g8m\" (UID: \"3a537dee-6a60-42e4-a928-c43d4db07171\") " pod="openstack/nova-cell1-conductor-db-sync-k7g8m" Dec 05 11:31:53 crc kubenswrapper[4728]: I1205 11:31:53.315600 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a537dee-6a60-42e4-a928-c43d4db07171-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-k7g8m\" (UID: \"3a537dee-6a60-42e4-a928-c43d4db07171\") " pod="openstack/nova-cell1-conductor-db-sync-k7g8m" Dec 05 11:31:53 crc kubenswrapper[4728]: I1205 11:31:53.317568 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a537dee-6a60-42e4-a928-c43d4db07171-config-data\") pod \"nova-cell1-conductor-db-sync-k7g8m\" (UID: \"3a537dee-6a60-42e4-a928-c43d4db07171\") " pod="openstack/nova-cell1-conductor-db-sync-k7g8m" Dec 05 11:31:53 crc kubenswrapper[4728]: I1205 11:31:53.355569 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 11:31:53 crc kubenswrapper[4728]: W1205 11:31:53.404719 4728 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf4f746e6_fdda_4860_830d_34b37eff58a2.slice/crio-008cf08e1a6306a55264345325db536bf84a7ac65ac9534b5a99287b6856fe1a WatchSource:0}: Error finding container 008cf08e1a6306a55264345325db536bf84a7ac65ac9534b5a99287b6856fe1a: Status 404 returned error can't find the container with id 008cf08e1a6306a55264345325db536bf84a7ac65ac9534b5a99287b6856fe1a Dec 05 11:31:53 crc kubenswrapper[4728]: I1205 11:31:53.420459 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a537dee-6a60-42e4-a928-c43d4db07171-config-data\") pod \"nova-cell1-conductor-db-sync-k7g8m\" (UID: \"3a537dee-6a60-42e4-a928-c43d4db07171\") " pod="openstack/nova-cell1-conductor-db-sync-k7g8m" Dec 05 11:31:53 crc kubenswrapper[4728]: I1205 11:31:53.420549 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3a537dee-6a60-42e4-a928-c43d4db07171-scripts\") pod \"nova-cell1-conductor-db-sync-k7g8m\" (UID: \"3a537dee-6a60-42e4-a928-c43d4db07171\") " pod="openstack/nova-cell1-conductor-db-sync-k7g8m" Dec 05 11:31:53 crc kubenswrapper[4728]: I1205 11:31:53.420614 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2dr9z\" (UniqueName: \"kubernetes.io/projected/3a537dee-6a60-42e4-a928-c43d4db07171-kube-api-access-2dr9z\") pod \"nova-cell1-conductor-db-sync-k7g8m\" (UID: \"3a537dee-6a60-42e4-a928-c43d4db07171\") " pod="openstack/nova-cell1-conductor-db-sync-k7g8m" Dec 05 11:31:53 crc kubenswrapper[4728]: I1205 11:31:53.420693 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a537dee-6a60-42e4-a928-c43d4db07171-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-k7g8m\" (UID: \"3a537dee-6a60-42e4-a928-c43d4db07171\") " pod="openstack/nova-cell1-conductor-db-sync-k7g8m" Dec 05 11:31:53 crc kubenswrapper[4728]: I1205 11:31:53.427118 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a537dee-6a60-42e4-a928-c43d4db07171-config-data\") pod \"nova-cell1-conductor-db-sync-k7g8m\" (UID: \"3a537dee-6a60-42e4-a928-c43d4db07171\") " pod="openstack/nova-cell1-conductor-db-sync-k7g8m" Dec 05 11:31:53 crc kubenswrapper[4728]: I1205 11:31:53.439311 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a537dee-6a60-42e4-a928-c43d4db07171-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-k7g8m\" (UID: \"3a537dee-6a60-42e4-a928-c43d4db07171\") " pod="openstack/nova-cell1-conductor-db-sync-k7g8m" Dec 05 11:31:53 crc kubenswrapper[4728]: I1205 11:31:53.441613 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3a537dee-6a60-42e4-a928-c43d4db07171-scripts\") pod \"nova-cell1-conductor-db-sync-k7g8m\" (UID: \"3a537dee-6a60-42e4-a928-c43d4db07171\") " pod="openstack/nova-cell1-conductor-db-sync-k7g8m" Dec 05 11:31:53 crc kubenswrapper[4728]: I1205 11:31:53.441953 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2dr9z\" (UniqueName: \"kubernetes.io/projected/3a537dee-6a60-42e4-a928-c43d4db07171-kube-api-access-2dr9z\") pod \"nova-cell1-conductor-db-sync-k7g8m\" (UID: \"3a537dee-6a60-42e4-a928-c43d4db07171\") " 
pod="openstack/nova-cell1-conductor-db-sync-k7g8m" Dec 05 11:31:53 crc kubenswrapper[4728]: I1205 11:31:53.561432 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-k7g8m" Dec 05 11:31:53 crc kubenswrapper[4728]: I1205 11:31:53.703679 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 11:31:53 crc kubenswrapper[4728]: W1205 11:31:53.721301 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7f559b48_333f_446d_bab9_177aa33286c2.slice/crio-fbdfd0ee232860dda892e17c45c9a66839f517f7608a506138df051a4b51d37e WatchSource:0}: Error finding container fbdfd0ee232860dda892e17c45c9a66839f517f7608a506138df051a4b51d37e: Status 404 returned error can't find the container with id fbdfd0ee232860dda892e17c45c9a66839f517f7608a506138df051a4b51d37e Dec 05 11:31:53 crc kubenswrapper[4728]: I1205 11:31:53.778113 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 11:31:53 crc kubenswrapper[4728]: W1205 11:31:53.791896 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbac9d7a3_475b_4107_9a25_45fe4f9756cb.slice/crio-ef2a22da900a3f988a625ed0ef61aa8d3321edc76336093b83c0c6a3dad8282c WatchSource:0}: Error finding container ef2a22da900a3f988a625ed0ef61aa8d3321edc76336093b83c0c6a3dad8282c: Status 404 returned error can't find the container with id ef2a22da900a3f988a625ed0ef61aa8d3321edc76336093b83c0c6a3dad8282c Dec 05 11:31:53 crc kubenswrapper[4728]: I1205 11:31:53.824100 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6b6c754dc9-czxc8"] Dec 05 11:31:54 crc kubenswrapper[4728]: I1205 11:31:54.062078 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-k7g8m"] Dec 05 11:31:54 crc kubenswrapper[4728]: W1205 11:31:54.069878 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3a537dee_6a60_42e4_a928_c43d4db07171.slice/crio-b927b172899b171f1f0f39e3d55cce0459299c6697af925c8d724abab07f2232 WatchSource:0}: Error finding container b927b172899b171f1f0f39e3d55cce0459299c6697af925c8d724abab07f2232: Status 404 returned error can't find the container with id b927b172899b171f1f0f39e3d55cce0459299c6697af925c8d724abab07f2232 Dec 05 11:31:54 crc kubenswrapper[4728]: I1205 11:31:54.205840 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"76948b41-b61f-4f2b-af11-eff84f150dab","Type":"ContainerStarted","Data":"c3d53ff7addae6f5ad68338e37016e54bd9c2a21b08866f681174f6c8d4133eb"} Dec 05 11:31:54 crc kubenswrapper[4728]: I1205 11:31:54.212377 4728 generic.go:334] "Generic (PLEG): container finished" podID="dfa85807-524b-4fa7-9cf4-b05a8b659c71" containerID="63c4847beb750fbf73d14ddae6bb3d52e2bd760bfa502bf4e25eefa32a128928" exitCode=0 Dec 05 11:31:54 crc kubenswrapper[4728]: I1205 11:31:54.212455 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b6c754dc9-czxc8" event={"ID":"dfa85807-524b-4fa7-9cf4-b05a8b659c71","Type":"ContainerDied","Data":"63c4847beb750fbf73d14ddae6bb3d52e2bd760bfa502bf4e25eefa32a128928"} Dec 05 11:31:54 crc kubenswrapper[4728]: I1205 11:31:54.212485 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b6c754dc9-czxc8" 
event={"ID":"dfa85807-524b-4fa7-9cf4-b05a8b659c71","Type":"ContainerStarted","Data":"d449bfde56058a5fcb7c7f7cc55ebeacd1018606f5c11cd293ae58e6da851fbe"} Dec 05 11:31:54 crc kubenswrapper[4728]: I1205 11:31:54.229638 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"7f559b48-333f-446d-bab9-177aa33286c2","Type":"ContainerStarted","Data":"fbdfd0ee232860dda892e17c45c9a66839f517f7608a506138df051a4b51d37e"} Dec 05 11:31:54 crc kubenswrapper[4728]: I1205 11:31:54.244356 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-k7g8m" event={"ID":"3a537dee-6a60-42e4-a928-c43d4db07171","Type":"ContainerStarted","Data":"b927b172899b171f1f0f39e3d55cce0459299c6697af925c8d724abab07f2232"} Dec 05 11:31:54 crc kubenswrapper[4728]: I1205 11:31:54.246912 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"bac9d7a3-475b-4107-9a25-45fe4f9756cb","Type":"ContainerStarted","Data":"ef2a22da900a3f988a625ed0ef61aa8d3321edc76336093b83c0c6a3dad8282c"} Dec 05 11:31:54 crc kubenswrapper[4728]: I1205 11:31:54.248893 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f4f746e6-fdda-4860-830d-34b37eff58a2","Type":"ContainerStarted","Data":"008cf08e1a6306a55264345325db536bf84a7ac65ac9534b5a99287b6856fe1a"} Dec 05 11:31:54 crc kubenswrapper[4728]: I1205 11:31:54.258965 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-6x7gh" event={"ID":"bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e","Type":"ContainerStarted","Data":"bdf98166550c591cad71522435d651aa191745c5dd5cf6e1224e03dc98a3f859"} Dec 05 11:31:54 crc kubenswrapper[4728]: I1205 11:31:54.315683 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-6x7gh" podStartSLOduration=3.315663985 podStartE2EDuration="3.315663985s" podCreationTimestamp="2025-12-05 11:31:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:31:54.285402508 +0000 UTC m=+1448.427525201" watchObservedRunningTime="2025-12-05 11:31:54.315663985 +0000 UTC m=+1448.457786698" Dec 05 11:31:55 crc kubenswrapper[4728]: I1205 11:31:55.274846 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b6c754dc9-czxc8" event={"ID":"dfa85807-524b-4fa7-9cf4-b05a8b659c71","Type":"ContainerStarted","Data":"71f9bc9cb7917f02b5226e6794bf3ebca9f83c660b2e9133b2c650dbdf71af50"} Dec 05 11:31:55 crc kubenswrapper[4728]: I1205 11:31:55.275337 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6b6c754dc9-czxc8" Dec 05 11:31:55 crc kubenswrapper[4728]: I1205 11:31:55.277231 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-k7g8m" event={"ID":"3a537dee-6a60-42e4-a928-c43d4db07171","Type":"ContainerStarted","Data":"b1909c12baffefe7a87547b87ac3efff4c8d6a0d7ce4f7abd4a5110d89613dbf"} Dec 05 11:31:55 crc kubenswrapper[4728]: I1205 11:31:55.301782 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6b6c754dc9-czxc8" podStartSLOduration=3.301765118 podStartE2EDuration="3.301765118s" podCreationTimestamp="2025-12-05 11:31:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:31:55.297322047 +0000 UTC 
m=+1449.439444750" watchObservedRunningTime="2025-12-05 11:31:55.301765118 +0000 UTC m=+1449.443887831" Dec 05 11:31:55 crc kubenswrapper[4728]: I1205 11:31:55.319527 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-k7g8m" podStartSLOduration=2.319511223 podStartE2EDuration="2.319511223s" podCreationTimestamp="2025-12-05 11:31:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:31:55.310462966 +0000 UTC m=+1449.452585679" watchObservedRunningTime="2025-12-05 11:31:55.319511223 +0000 UTC m=+1449.461633916" Dec 05 11:31:55 crc kubenswrapper[4728]: I1205 11:31:55.702106 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 11:31:55 crc kubenswrapper[4728]: I1205 11:31:55.702534 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 11:31:55 crc kubenswrapper[4728]: I1205 11:31:55.702675 4728 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" Dec 05 11:31:55 crc kubenswrapper[4728]: I1205 11:31:55.703690 4728 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f07ec3293f2cf410293b607034ef0c1092d5f4131c09f2ae85161f9d724cfe26"} pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 11:31:55 crc kubenswrapper[4728]: I1205 11:31:55.703823 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" containerID="cri-o://f07ec3293f2cf410293b607034ef0c1092d5f4131c09f2ae85161f9d724cfe26" gracePeriod=600 Dec 05 11:31:56 crc kubenswrapper[4728]: I1205 11:31:56.256293 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 11:31:56 crc kubenswrapper[4728]: I1205 11:31:56.264715 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 11:31:56 crc kubenswrapper[4728]: I1205 11:31:56.298419 4728 generic.go:334] "Generic (PLEG): container finished" podID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerID="f07ec3293f2cf410293b607034ef0c1092d5f4131c09f2ae85161f9d724cfe26" exitCode=0 Dec 05 11:31:56 crc kubenswrapper[4728]: I1205 11:31:56.299595 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" event={"ID":"95bfa60b-fcb6-4519-abc5-c25fea50921d","Type":"ContainerDied","Data":"f07ec3293f2cf410293b607034ef0c1092d5f4131c09f2ae85161f9d724cfe26"} Dec 05 11:31:56 crc kubenswrapper[4728]: I1205 11:31:56.299643 4728 scope.go:117] "RemoveContainer" containerID="d8efdde93e15953f6b7d53c9af0274517d1b20f2c87e219dbc82f0145e651995" Dec 05 
11:31:58 crc kubenswrapper[4728]: I1205 11:31:58.328254 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"7f559b48-333f-446d-bab9-177aa33286c2","Type":"ContainerStarted","Data":"7e0ccdaef1db79469cd2a084550d1d187b0086498785195074388dc934dfdebd"} Dec 05 11:31:58 crc kubenswrapper[4728]: I1205 11:31:58.332294 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"bac9d7a3-475b-4107-9a25-45fe4f9756cb","Type":"ContainerStarted","Data":"b9db0a739e5d89088ea06e611fc763aab9cf6c16c9b63b0fa0c44dc4dba3ba7b"} Dec 05 11:31:58 crc kubenswrapper[4728]: I1205 11:31:58.332372 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="bac9d7a3-475b-4107-9a25-45fe4f9756cb" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://b9db0a739e5d89088ea06e611fc763aab9cf6c16c9b63b0fa0c44dc4dba3ba7b" gracePeriod=30 Dec 05 11:31:58 crc kubenswrapper[4728]: I1205 11:31:58.335312 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" event={"ID":"95bfa60b-fcb6-4519-abc5-c25fea50921d","Type":"ContainerStarted","Data":"9ec8fcb9abf0c27ff7684aed90530899b9635c169a8bf34693a24b2f503deb1e"} Dec 05 11:31:58 crc kubenswrapper[4728]: I1205 11:31:58.342317 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f4f746e6-fdda-4860-830d-34b37eff58a2","Type":"ContainerStarted","Data":"a40d7bd045000d7822ab8c174d7fc886a8b23a99d3cbfe58547b4acb984ca72b"} Dec 05 11:31:58 crc kubenswrapper[4728]: I1205 11:31:58.344827 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.676315619 podStartE2EDuration="6.344812773s" podCreationTimestamp="2025-12-05 11:31:52 +0000 UTC" firstStartedPulling="2025-12-05 11:31:53.725706296 +0000 UTC m=+1447.867828989" lastFinishedPulling="2025-12-05 11:31:57.39420343 +0000 UTC m=+1451.536326143" observedRunningTime="2025-12-05 11:31:58.341901573 +0000 UTC m=+1452.484024266" watchObservedRunningTime="2025-12-05 11:31:58.344812773 +0000 UTC m=+1452.486935476" Dec 05 11:31:58 crc kubenswrapper[4728]: I1205 11:31:58.346496 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"76948b41-b61f-4f2b-af11-eff84f150dab","Type":"ContainerStarted","Data":"3a19ced693d59db18eae63a9d5a3e6e7675cd7102c04a8bc3e3d85719ea7a42e"} Dec 05 11:31:58 crc kubenswrapper[4728]: I1205 11:31:58.394464 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.8401434759999997 podStartE2EDuration="6.394441809s" podCreationTimestamp="2025-12-05 11:31:52 +0000 UTC" firstStartedPulling="2025-12-05 11:31:53.810083932 +0000 UTC m=+1447.952206615" lastFinishedPulling="2025-12-05 11:31:57.364382255 +0000 UTC m=+1451.506504948" observedRunningTime="2025-12-05 11:31:58.388565408 +0000 UTC m=+1452.530688101" watchObservedRunningTime="2025-12-05 11:31:58.394441809 +0000 UTC m=+1452.536564502" Dec 05 11:31:59 crc kubenswrapper[4728]: I1205 11:31:59.358199 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f4f746e6-fdda-4860-830d-34b37eff58a2","Type":"ContainerStarted","Data":"5350bc9bfad1573b5cc62a1a2fcfa2ee994070b788bb13fdecec920275734258"} Dec 05 11:31:59 crc kubenswrapper[4728]: I1205 11:31:59.358280 4728 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="f4f746e6-fdda-4860-830d-34b37eff58a2" containerName="nova-metadata-log" containerID="cri-o://a40d7bd045000d7822ab8c174d7fc886a8b23a99d3cbfe58547b4acb984ca72b" gracePeriod=30 Dec 05 11:31:59 crc kubenswrapper[4728]: I1205 11:31:59.358366 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="f4f746e6-fdda-4860-830d-34b37eff58a2" containerName="nova-metadata-metadata" containerID="cri-o://5350bc9bfad1573b5cc62a1a2fcfa2ee994070b788bb13fdecec920275734258" gracePeriod=30 Dec 05 11:31:59 crc kubenswrapper[4728]: I1205 11:31:59.362913 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"76948b41-b61f-4f2b-af11-eff84f150dab","Type":"ContainerStarted","Data":"94934ec710944a1aaa2e39c0315e60c173dee33a92d4a98d3f432e7a94451cd0"} Dec 05 11:31:59 crc kubenswrapper[4728]: I1205 11:31:59.383890 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.494310279 podStartE2EDuration="7.383869743s" podCreationTimestamp="2025-12-05 11:31:52 +0000 UTC" firstStartedPulling="2025-12-05 11:31:53.412342594 +0000 UTC m=+1447.554465287" lastFinishedPulling="2025-12-05 11:31:57.301902058 +0000 UTC m=+1451.444024751" observedRunningTime="2025-12-05 11:31:59.383247226 +0000 UTC m=+1453.525369929" watchObservedRunningTime="2025-12-05 11:31:59.383869743 +0000 UTC m=+1453.525992446" Dec 05 11:31:59 crc kubenswrapper[4728]: I1205 11:31:59.409543 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.377158709 podStartE2EDuration="7.409524874s" podCreationTimestamp="2025-12-05 11:31:52 +0000 UTC" firstStartedPulling="2025-12-05 11:31:53.331113805 +0000 UTC m=+1447.473236488" lastFinishedPulling="2025-12-05 11:31:57.36347996 +0000 UTC m=+1451.505602653" observedRunningTime="2025-12-05 11:31:59.405403811 +0000 UTC m=+1453.547526524" watchObservedRunningTime="2025-12-05 11:31:59.409524874 +0000 UTC m=+1453.551647567" Dec 05 11:32:00 crc kubenswrapper[4728]: I1205 11:32:00.086123 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 11:32:00 crc kubenswrapper[4728]: I1205 11:32:00.255264 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-798rb\" (UniqueName: \"kubernetes.io/projected/f4f746e6-fdda-4860-830d-34b37eff58a2-kube-api-access-798rb\") pod \"f4f746e6-fdda-4860-830d-34b37eff58a2\" (UID: \"f4f746e6-fdda-4860-830d-34b37eff58a2\") " Dec 05 11:32:00 crc kubenswrapper[4728]: I1205 11:32:00.255358 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f4f746e6-fdda-4860-830d-34b37eff58a2-config-data\") pod \"f4f746e6-fdda-4860-830d-34b37eff58a2\" (UID: \"f4f746e6-fdda-4860-830d-34b37eff58a2\") " Dec 05 11:32:00 crc kubenswrapper[4728]: I1205 11:32:00.255559 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f4f746e6-fdda-4860-830d-34b37eff58a2-logs\") pod \"f4f746e6-fdda-4860-830d-34b37eff58a2\" (UID: \"f4f746e6-fdda-4860-830d-34b37eff58a2\") " Dec 05 11:32:00 crc kubenswrapper[4728]: I1205 11:32:00.255590 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4f746e6-fdda-4860-830d-34b37eff58a2-combined-ca-bundle\") pod \"f4f746e6-fdda-4860-830d-34b37eff58a2\" (UID: \"f4f746e6-fdda-4860-830d-34b37eff58a2\") " Dec 05 11:32:00 crc kubenswrapper[4728]: I1205 11:32:00.255921 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f4f746e6-fdda-4860-830d-34b37eff58a2-logs" (OuterVolumeSpecName: "logs") pod "f4f746e6-fdda-4860-830d-34b37eff58a2" (UID: "f4f746e6-fdda-4860-830d-34b37eff58a2"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:32:00 crc kubenswrapper[4728]: I1205 11:32:00.256439 4728 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f4f746e6-fdda-4860-830d-34b37eff58a2-logs\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:00 crc kubenswrapper[4728]: I1205 11:32:00.268040 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4f746e6-fdda-4860-830d-34b37eff58a2-kube-api-access-798rb" (OuterVolumeSpecName: "kube-api-access-798rb") pod "f4f746e6-fdda-4860-830d-34b37eff58a2" (UID: "f4f746e6-fdda-4860-830d-34b37eff58a2"). InnerVolumeSpecName "kube-api-access-798rb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:32:00 crc kubenswrapper[4728]: I1205 11:32:00.327848 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4f746e6-fdda-4860-830d-34b37eff58a2-config-data" (OuterVolumeSpecName: "config-data") pod "f4f746e6-fdda-4860-830d-34b37eff58a2" (UID: "f4f746e6-fdda-4860-830d-34b37eff58a2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:32:00 crc kubenswrapper[4728]: I1205 11:32:00.327966 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4f746e6-fdda-4860-830d-34b37eff58a2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f4f746e6-fdda-4860-830d-34b37eff58a2" (UID: "f4f746e6-fdda-4860-830d-34b37eff58a2"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:32:00 crc kubenswrapper[4728]: I1205 11:32:00.359027 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-798rb\" (UniqueName: \"kubernetes.io/projected/f4f746e6-fdda-4860-830d-34b37eff58a2-kube-api-access-798rb\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:00 crc kubenswrapper[4728]: I1205 11:32:00.359067 4728 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f4f746e6-fdda-4860-830d-34b37eff58a2-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:00 crc kubenswrapper[4728]: I1205 11:32:00.359077 4728 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4f746e6-fdda-4860-830d-34b37eff58a2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:00 crc kubenswrapper[4728]: I1205 11:32:00.407009 4728 generic.go:334] "Generic (PLEG): container finished" podID="f4f746e6-fdda-4860-830d-34b37eff58a2" containerID="5350bc9bfad1573b5cc62a1a2fcfa2ee994070b788bb13fdecec920275734258" exitCode=0 Dec 05 11:32:00 crc kubenswrapper[4728]: I1205 11:32:00.407045 4728 generic.go:334] "Generic (PLEG): container finished" podID="f4f746e6-fdda-4860-830d-34b37eff58a2" containerID="a40d7bd045000d7822ab8c174d7fc886a8b23a99d3cbfe58547b4acb984ca72b" exitCode=143 Dec 05 11:32:00 crc kubenswrapper[4728]: I1205 11:32:00.407103 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 11:32:00 crc kubenswrapper[4728]: I1205 11:32:00.407153 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f4f746e6-fdda-4860-830d-34b37eff58a2","Type":"ContainerDied","Data":"5350bc9bfad1573b5cc62a1a2fcfa2ee994070b788bb13fdecec920275734258"} Dec 05 11:32:00 crc kubenswrapper[4728]: I1205 11:32:00.407196 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f4f746e6-fdda-4860-830d-34b37eff58a2","Type":"ContainerDied","Data":"a40d7bd045000d7822ab8c174d7fc886a8b23a99d3cbfe58547b4acb984ca72b"} Dec 05 11:32:00 crc kubenswrapper[4728]: I1205 11:32:00.407209 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f4f746e6-fdda-4860-830d-34b37eff58a2","Type":"ContainerDied","Data":"008cf08e1a6306a55264345325db536bf84a7ac65ac9534b5a99287b6856fe1a"} Dec 05 11:32:00 crc kubenswrapper[4728]: I1205 11:32:00.407228 4728 scope.go:117] "RemoveContainer" containerID="5350bc9bfad1573b5cc62a1a2fcfa2ee994070b788bb13fdecec920275734258" Dec 05 11:32:00 crc kubenswrapper[4728]: I1205 11:32:00.441615 4728 scope.go:117] "RemoveContainer" containerID="a40d7bd045000d7822ab8c174d7fc886a8b23a99d3cbfe58547b4acb984ca72b" Dec 05 11:32:00 crc kubenswrapper[4728]: I1205 11:32:00.441940 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 11:32:00 crc kubenswrapper[4728]: I1205 11:32:00.453869 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 11:32:00 crc kubenswrapper[4728]: I1205 11:32:00.457905 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 05 11:32:00 crc kubenswrapper[4728]: E1205 11:32:00.458428 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4f746e6-fdda-4860-830d-34b37eff58a2" containerName="nova-metadata-metadata" Dec 05 11:32:00 crc kubenswrapper[4728]: I1205 11:32:00.458440 4728 
state_mem.go:107] "Deleted CPUSet assignment" podUID="f4f746e6-fdda-4860-830d-34b37eff58a2" containerName="nova-metadata-metadata" Dec 05 11:32:00 crc kubenswrapper[4728]: E1205 11:32:00.458472 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4f746e6-fdda-4860-830d-34b37eff58a2" containerName="nova-metadata-log" Dec 05 11:32:00 crc kubenswrapper[4728]: I1205 11:32:00.458478 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4f746e6-fdda-4860-830d-34b37eff58a2" containerName="nova-metadata-log" Dec 05 11:32:00 crc kubenswrapper[4728]: I1205 11:32:00.458663 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4f746e6-fdda-4860-830d-34b37eff58a2" containerName="nova-metadata-metadata" Dec 05 11:32:00 crc kubenswrapper[4728]: I1205 11:32:00.458683 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4f746e6-fdda-4860-830d-34b37eff58a2" containerName="nova-metadata-log" Dec 05 11:32:00 crc kubenswrapper[4728]: I1205 11:32:00.459969 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 11:32:00 crc kubenswrapper[4728]: I1205 11:32:00.480138 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 05 11:32:00 crc kubenswrapper[4728]: I1205 11:32:00.480338 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 05 11:32:00 crc kubenswrapper[4728]: I1205 11:32:00.483172 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 11:32:00 crc kubenswrapper[4728]: I1205 11:32:00.530855 4728 scope.go:117] "RemoveContainer" containerID="5350bc9bfad1573b5cc62a1a2fcfa2ee994070b788bb13fdecec920275734258" Dec 05 11:32:00 crc kubenswrapper[4728]: E1205 11:32:00.538391 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5350bc9bfad1573b5cc62a1a2fcfa2ee994070b788bb13fdecec920275734258\": container with ID starting with 5350bc9bfad1573b5cc62a1a2fcfa2ee994070b788bb13fdecec920275734258 not found: ID does not exist" containerID="5350bc9bfad1573b5cc62a1a2fcfa2ee994070b788bb13fdecec920275734258" Dec 05 11:32:00 crc kubenswrapper[4728]: I1205 11:32:00.538442 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5350bc9bfad1573b5cc62a1a2fcfa2ee994070b788bb13fdecec920275734258"} err="failed to get container status \"5350bc9bfad1573b5cc62a1a2fcfa2ee994070b788bb13fdecec920275734258\": rpc error: code = NotFound desc = could not find container \"5350bc9bfad1573b5cc62a1a2fcfa2ee994070b788bb13fdecec920275734258\": container with ID starting with 5350bc9bfad1573b5cc62a1a2fcfa2ee994070b788bb13fdecec920275734258 not found: ID does not exist" Dec 05 11:32:00 crc kubenswrapper[4728]: I1205 11:32:00.538479 4728 scope.go:117] "RemoveContainer" containerID="a40d7bd045000d7822ab8c174d7fc886a8b23a99d3cbfe58547b4acb984ca72b" Dec 05 11:32:00 crc kubenswrapper[4728]: E1205 11:32:00.539063 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a40d7bd045000d7822ab8c174d7fc886a8b23a99d3cbfe58547b4acb984ca72b\": container with ID starting with a40d7bd045000d7822ab8c174d7fc886a8b23a99d3cbfe58547b4acb984ca72b not found: ID does not exist" containerID="a40d7bd045000d7822ab8c174d7fc886a8b23a99d3cbfe58547b4acb984ca72b" Dec 05 11:32:00 crc kubenswrapper[4728]: I1205 
11:32:00.539100 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a40d7bd045000d7822ab8c174d7fc886a8b23a99d3cbfe58547b4acb984ca72b"} err="failed to get container status \"a40d7bd045000d7822ab8c174d7fc886a8b23a99d3cbfe58547b4acb984ca72b\": rpc error: code = NotFound desc = could not find container \"a40d7bd045000d7822ab8c174d7fc886a8b23a99d3cbfe58547b4acb984ca72b\": container with ID starting with a40d7bd045000d7822ab8c174d7fc886a8b23a99d3cbfe58547b4acb984ca72b not found: ID does not exist" Dec 05 11:32:00 crc kubenswrapper[4728]: I1205 11:32:00.539125 4728 scope.go:117] "RemoveContainer" containerID="5350bc9bfad1573b5cc62a1a2fcfa2ee994070b788bb13fdecec920275734258" Dec 05 11:32:00 crc kubenswrapper[4728]: I1205 11:32:00.540300 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5350bc9bfad1573b5cc62a1a2fcfa2ee994070b788bb13fdecec920275734258"} err="failed to get container status \"5350bc9bfad1573b5cc62a1a2fcfa2ee994070b788bb13fdecec920275734258\": rpc error: code = NotFound desc = could not find container \"5350bc9bfad1573b5cc62a1a2fcfa2ee994070b788bb13fdecec920275734258\": container with ID starting with 5350bc9bfad1573b5cc62a1a2fcfa2ee994070b788bb13fdecec920275734258 not found: ID does not exist" Dec 05 11:32:00 crc kubenswrapper[4728]: I1205 11:32:00.540341 4728 scope.go:117] "RemoveContainer" containerID="a40d7bd045000d7822ab8c174d7fc886a8b23a99d3cbfe58547b4acb984ca72b" Dec 05 11:32:00 crc kubenswrapper[4728]: I1205 11:32:00.541076 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a40d7bd045000d7822ab8c174d7fc886a8b23a99d3cbfe58547b4acb984ca72b"} err="failed to get container status \"a40d7bd045000d7822ab8c174d7fc886a8b23a99d3cbfe58547b4acb984ca72b\": rpc error: code = NotFound desc = could not find container \"a40d7bd045000d7822ab8c174d7fc886a8b23a99d3cbfe58547b4acb984ca72b\": container with ID starting with a40d7bd045000d7822ab8c174d7fc886a8b23a99d3cbfe58547b4acb984ca72b not found: ID does not exist" Dec 05 11:32:00 crc kubenswrapper[4728]: I1205 11:32:00.562676 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e716093-e9b7-45db-b295-f7f423424f94-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6e716093-e9b7-45db-b295-f7f423424f94\") " pod="openstack/nova-metadata-0" Dec 05 11:32:00 crc kubenswrapper[4728]: I1205 11:32:00.562756 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6e716093-e9b7-45db-b295-f7f423424f94-logs\") pod \"nova-metadata-0\" (UID: \"6e716093-e9b7-45db-b295-f7f423424f94\") " pod="openstack/nova-metadata-0" Dec 05 11:32:00 crc kubenswrapper[4728]: I1205 11:32:00.562784 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6e716093-e9b7-45db-b295-f7f423424f94-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"6e716093-e9b7-45db-b295-f7f423424f94\") " pod="openstack/nova-metadata-0" Dec 05 11:32:00 crc kubenswrapper[4728]: I1205 11:32:00.562854 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jbw5l\" (UniqueName: \"kubernetes.io/projected/6e716093-e9b7-45db-b295-f7f423424f94-kube-api-access-jbw5l\") pod \"nova-metadata-0\" 
(UID: \"6e716093-e9b7-45db-b295-f7f423424f94\") " pod="openstack/nova-metadata-0" Dec 05 11:32:00 crc kubenswrapper[4728]: I1205 11:32:00.562939 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e716093-e9b7-45db-b295-f7f423424f94-config-data\") pod \"nova-metadata-0\" (UID: \"6e716093-e9b7-45db-b295-f7f423424f94\") " pod="openstack/nova-metadata-0" Dec 05 11:32:00 crc kubenswrapper[4728]: I1205 11:32:00.664932 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e716093-e9b7-45db-b295-f7f423424f94-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6e716093-e9b7-45db-b295-f7f423424f94\") " pod="openstack/nova-metadata-0" Dec 05 11:32:00 crc kubenswrapper[4728]: I1205 11:32:00.664999 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6e716093-e9b7-45db-b295-f7f423424f94-logs\") pod \"nova-metadata-0\" (UID: \"6e716093-e9b7-45db-b295-f7f423424f94\") " pod="openstack/nova-metadata-0" Dec 05 11:32:00 crc kubenswrapper[4728]: I1205 11:32:00.665018 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6e716093-e9b7-45db-b295-f7f423424f94-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"6e716093-e9b7-45db-b295-f7f423424f94\") " pod="openstack/nova-metadata-0" Dec 05 11:32:00 crc kubenswrapper[4728]: I1205 11:32:00.665041 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jbw5l\" (UniqueName: \"kubernetes.io/projected/6e716093-e9b7-45db-b295-f7f423424f94-kube-api-access-jbw5l\") pod \"nova-metadata-0\" (UID: \"6e716093-e9b7-45db-b295-f7f423424f94\") " pod="openstack/nova-metadata-0" Dec 05 11:32:00 crc kubenswrapper[4728]: I1205 11:32:00.665086 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e716093-e9b7-45db-b295-f7f423424f94-config-data\") pod \"nova-metadata-0\" (UID: \"6e716093-e9b7-45db-b295-f7f423424f94\") " pod="openstack/nova-metadata-0" Dec 05 11:32:00 crc kubenswrapper[4728]: I1205 11:32:00.665451 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6e716093-e9b7-45db-b295-f7f423424f94-logs\") pod \"nova-metadata-0\" (UID: \"6e716093-e9b7-45db-b295-f7f423424f94\") " pod="openstack/nova-metadata-0" Dec 05 11:32:00 crc kubenswrapper[4728]: I1205 11:32:00.669537 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e716093-e9b7-45db-b295-f7f423424f94-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6e716093-e9b7-45db-b295-f7f423424f94\") " pod="openstack/nova-metadata-0" Dec 05 11:32:00 crc kubenswrapper[4728]: I1205 11:32:00.670012 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6e716093-e9b7-45db-b295-f7f423424f94-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"6e716093-e9b7-45db-b295-f7f423424f94\") " pod="openstack/nova-metadata-0" Dec 05 11:32:00 crc kubenswrapper[4728]: I1205 11:32:00.670242 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/6e716093-e9b7-45db-b295-f7f423424f94-config-data\") pod \"nova-metadata-0\" (UID: \"6e716093-e9b7-45db-b295-f7f423424f94\") " pod="openstack/nova-metadata-0" Dec 05 11:32:00 crc kubenswrapper[4728]: I1205 11:32:00.683526 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jbw5l\" (UniqueName: \"kubernetes.io/projected/6e716093-e9b7-45db-b295-f7f423424f94-kube-api-access-jbw5l\") pod \"nova-metadata-0\" (UID: \"6e716093-e9b7-45db-b295-f7f423424f94\") " pod="openstack/nova-metadata-0" Dec 05 11:32:00 crc kubenswrapper[4728]: I1205 11:32:00.854484 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 11:32:01 crc kubenswrapper[4728]: I1205 11:32:01.378718 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 11:32:01 crc kubenswrapper[4728]: W1205 11:32:01.379349 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6e716093_e9b7_45db_b295_f7f423424f94.slice/crio-286066507b6822de15c3fca91203c168d683ad9a9e96d6a99e3da57561584110 WatchSource:0}: Error finding container 286066507b6822de15c3fca91203c168d683ad9a9e96d6a99e3da57561584110: Status 404 returned error can't find the container with id 286066507b6822de15c3fca91203c168d683ad9a9e96d6a99e3da57561584110 Dec 05 11:32:01 crc kubenswrapper[4728]: I1205 11:32:01.428847 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6e716093-e9b7-45db-b295-f7f423424f94","Type":"ContainerStarted","Data":"286066507b6822de15c3fca91203c168d683ad9a9e96d6a99e3da57561584110"} Dec 05 11:32:02 crc kubenswrapper[4728]: I1205 11:32:02.363322 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4f746e6-fdda-4860-830d-34b37eff58a2" path="/var/lib/kubelet/pods/f4f746e6-fdda-4860-830d-34b37eff58a2/volumes" Dec 05 11:32:02 crc kubenswrapper[4728]: I1205 11:32:02.458133 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6e716093-e9b7-45db-b295-f7f423424f94","Type":"ContainerStarted","Data":"32f78df62e80e9cbe6fa6fde1acebf6abe4c61082e0d88daa1960f3636c0931b"} Dec 05 11:32:02 crc kubenswrapper[4728]: I1205 11:32:02.458212 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6e716093-e9b7-45db-b295-f7f423424f94","Type":"ContainerStarted","Data":"32ceab692018e1cf3dec3d182b7a6ccb1e7ebfe1569e5dd7f068d4c616d2fc62"} Dec 05 11:32:02 crc kubenswrapper[4728]: I1205 11:32:02.462929 4728 generic.go:334] "Generic (PLEG): container finished" podID="bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e" containerID="bdf98166550c591cad71522435d651aa191745c5dd5cf6e1224e03dc98a3f859" exitCode=0 Dec 05 11:32:02 crc kubenswrapper[4728]: I1205 11:32:02.463012 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-6x7gh" event={"ID":"bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e","Type":"ContainerDied","Data":"bdf98166550c591cad71522435d651aa191745c5dd5cf6e1224e03dc98a3f859"} Dec 05 11:32:02 crc kubenswrapper[4728]: I1205 11:32:02.498521 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.498476813 podStartE2EDuration="2.498476813s" podCreationTimestamp="2025-12-05 11:32:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
Dec 05 11:32:02 crc kubenswrapper[4728]: I1205 11:32:02.498521 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.498476813 podStartE2EDuration="2.498476813s" podCreationTimestamp="2025-12-05 11:32:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:32:02.479553786 +0000 UTC m=+1456.621676539" watchObservedRunningTime="2025-12-05 11:32:02.498476813 +0000 UTC m=+1456.640599516"
Dec 05 11:32:02 crc kubenswrapper[4728]: I1205 11:32:02.690547 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Dec 05 11:32:02 crc kubenswrapper[4728]: I1205 11:32:02.690611 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Dec 05 11:32:03 crc kubenswrapper[4728]: I1205 11:32:03.011256 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-share-share1-0"
Dec 05 11:32:03 crc kubenswrapper[4728]: I1205 11:32:03.120713 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0"
Dec 05 11:32:03 crc kubenswrapper[4728]: I1205 11:32:03.120783 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Dec 05 11:32:03 crc kubenswrapper[4728]: I1205 11:32:03.143052 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6b6c754dc9-czxc8"
Dec 05 11:32:03 crc kubenswrapper[4728]: I1205 11:32:03.155255 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0"
Dec 05 11:32:03 crc kubenswrapper[4728]: I1205 11:32:03.207916 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0"
Dec 05 11:32:03 crc kubenswrapper[4728]: I1205 11:32:03.230991 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-56696ff475-f9ztl"]
Dec 05 11:32:03 crc kubenswrapper[4728]: I1205 11:32:03.231250 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-56696ff475-f9ztl" podUID="e01300fa-c016-435f-9d98-325203486428" containerName="dnsmasq-dns" containerID="cri-o://3f84844352374fd98447dc00dc547415d652ffffbd1b426deb3aeb4a7fe50fda" gracePeriod=10
Dec 05 11:32:03 crc kubenswrapper[4728]: I1205 11:32:03.312825 4728 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-56696ff475-f9ztl" podUID="e01300fa-c016-435f-9d98-325203486428" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.182:5353: connect: connection refused"
Dec 05 11:32:03 crc kubenswrapper[4728]: I1205 11:32:03.475270 4728 generic.go:334] "Generic (PLEG): container finished" podID="3a537dee-6a60-42e4-a928-c43d4db07171" containerID="b1909c12baffefe7a87547b87ac3efff4c8d6a0d7ce4f7abd4a5110d89613dbf" exitCode=0
Dec 05 11:32:03 crc kubenswrapper[4728]: I1205 11:32:03.475334 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-k7g8m" event={"ID":"3a537dee-6a60-42e4-a928-c43d4db07171","Type":"ContainerDied","Data":"b1909c12baffefe7a87547b87ac3efff4c8d6a0d7ce4f7abd4a5110d89613dbf"}
Dec 05 11:32:03 crc kubenswrapper[4728]: I1205 11:32:03.485175 4728 generic.go:334] "Generic (PLEG): container finished" podID="e01300fa-c016-435f-9d98-325203486428" containerID="3f84844352374fd98447dc00dc547415d652ffffbd1b426deb3aeb4a7fe50fda" exitCode=0
Dec 05 11:32:03 crc kubenswrapper[4728]: I1205 11:32:03.485408 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56696ff475-f9ztl" event={"ID":"e01300fa-c016-435f-9d98-325203486428","Type":"ContainerDied","Data":"3f84844352374fd98447dc00dc547415d652ffffbd1b426deb3aeb4a7fe50fda"}
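[annotation] The dnsmasq-dns readiness failure above ("dial tcp 10.217.0.182:5353: connect: connection refused") has the shape of a TCP-socket-style probe racing a container shutdown: the kubelet dials the pod IP and port, and once the container has been told to stop (gracePeriod=10) nothing is listening there. A self-contained Go sketch of such a check; the one-second timeout is an assumption, and the address is simply the one in the log.

package main

import (
	"fmt"
	"net"
	"time"
)

// tcpProbe models a TCP readiness check: connect within the timeout,
// then close. A refused connection is exactly the failure logged above.
func tcpProbe(addr string, timeout time.Duration) error {
	conn, err := net.DialTimeout("tcp", addr, timeout)
	if err != nil {
		return err // e.g. "connect: connection refused" while dnsmasq stops
	}
	return conn.Close()
}

func main() {
	if err := tcpProbe("10.217.0.182:5353", time.Second); err != nil {
		fmt.Println("Probe failed:", err) // kubelet marks the container unready
	} else {
		fmt.Println("ready")
	}
}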
Dec 05 11:32:03 crc kubenswrapper[4728]: I1205 11:32:03.517877 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0"
Dec 05 11:32:03 crc kubenswrapper[4728]: I1205 11:32:03.773009 4728 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="76948b41-b61f-4f2b-af11-eff84f150dab" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.203:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Dec 05 11:32:03 crc kubenswrapper[4728]: I1205 11:32:03.773420 4728 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="76948b41-b61f-4f2b-af11-eff84f150dab" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.203:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Dec 05 11:32:03 crc kubenswrapper[4728]: I1205 11:32:03.883335 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56696ff475-f9ztl"
Dec 05 11:32:03 crc kubenswrapper[4728]: I1205 11:32:03.889246 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-6x7gh"
Dec 05 11:32:04 crc kubenswrapper[4728]: I1205 11:32:04.071828 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e-scripts\") pod \"bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e\" (UID: \"bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e\") "
Dec 05 11:32:04 crc kubenswrapper[4728]: I1205 11:32:04.072263 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e01300fa-c016-435f-9d98-325203486428-config\") pod \"e01300fa-c016-435f-9d98-325203486428\" (UID: \"e01300fa-c016-435f-9d98-325203486428\") "
Dec 05 11:32:04 crc kubenswrapper[4728]: I1205 11:32:04.072310 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e-config-data\") pod \"bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e\" (UID: \"bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e\") "
Dec 05 11:32:04 crc kubenswrapper[4728]: I1205 11:32:04.072356 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rr22h\" (UniqueName: \"kubernetes.io/projected/bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e-kube-api-access-rr22h\") pod \"bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e\" (UID: \"bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e\") "
Dec 05 11:32:04 crc kubenswrapper[4728]: I1205 11:32:04.072396 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e-combined-ca-bundle\") pod \"bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e\" (UID: \"bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e\") "
Dec 05 11:32:04 crc kubenswrapper[4728]: I1205 11:32:04.072483 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e01300fa-c016-435f-9d98-325203486428-dns-swift-storage-0\") pod \"e01300fa-c016-435f-9d98-325203486428\" (UID: \"e01300fa-c016-435f-9d98-325203486428\") "
Dec 05 11:32:04 crc kubenswrapper[4728]: I1205 11:32:04.072547 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f5xfg\" (UniqueName:
\"kubernetes.io/projected/e01300fa-c016-435f-9d98-325203486428-kube-api-access-f5xfg\") pod \"e01300fa-c016-435f-9d98-325203486428\" (UID: \"e01300fa-c016-435f-9d98-325203486428\") " Dec 05 11:32:04 crc kubenswrapper[4728]: I1205 11:32:04.072602 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e01300fa-c016-435f-9d98-325203486428-ovsdbserver-sb\") pod \"e01300fa-c016-435f-9d98-325203486428\" (UID: \"e01300fa-c016-435f-9d98-325203486428\") " Dec 05 11:32:04 crc kubenswrapper[4728]: I1205 11:32:04.072632 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e01300fa-c016-435f-9d98-325203486428-ovsdbserver-nb\") pod \"e01300fa-c016-435f-9d98-325203486428\" (UID: \"e01300fa-c016-435f-9d98-325203486428\") " Dec 05 11:32:04 crc kubenswrapper[4728]: I1205 11:32:04.072656 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e01300fa-c016-435f-9d98-325203486428-dns-svc\") pod \"e01300fa-c016-435f-9d98-325203486428\" (UID: \"e01300fa-c016-435f-9d98-325203486428\") " Dec 05 11:32:04 crc kubenswrapper[4728]: I1205 11:32:04.080246 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e01300fa-c016-435f-9d98-325203486428-kube-api-access-f5xfg" (OuterVolumeSpecName: "kube-api-access-f5xfg") pod "e01300fa-c016-435f-9d98-325203486428" (UID: "e01300fa-c016-435f-9d98-325203486428"). InnerVolumeSpecName "kube-api-access-f5xfg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:32:04 crc kubenswrapper[4728]: I1205 11:32:04.080951 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e-kube-api-access-rr22h" (OuterVolumeSpecName: "kube-api-access-rr22h") pod "bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e" (UID: "bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e"). InnerVolumeSpecName "kube-api-access-rr22h". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:32:04 crc kubenswrapper[4728]: I1205 11:32:04.105209 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e-scripts" (OuterVolumeSpecName: "scripts") pod "bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e" (UID: "bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:32:04 crc kubenswrapper[4728]: I1205 11:32:04.111923 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e-config-data" (OuterVolumeSpecName: "config-data") pod "bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e" (UID: "bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:32:04 crc kubenswrapper[4728]: I1205 11:32:04.125750 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e" (UID: "bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:32:04 crc kubenswrapper[4728]: I1205 11:32:04.140388 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e01300fa-c016-435f-9d98-325203486428-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "e01300fa-c016-435f-9d98-325203486428" (UID: "e01300fa-c016-435f-9d98-325203486428"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:32:04 crc kubenswrapper[4728]: I1205 11:32:04.143003 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e01300fa-c016-435f-9d98-325203486428-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "e01300fa-c016-435f-9d98-325203486428" (UID: "e01300fa-c016-435f-9d98-325203486428"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:32:04 crc kubenswrapper[4728]: I1205 11:32:04.150025 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e01300fa-c016-435f-9d98-325203486428-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e01300fa-c016-435f-9d98-325203486428" (UID: "e01300fa-c016-435f-9d98-325203486428"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:32:04 crc kubenswrapper[4728]: I1205 11:32:04.158441 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e01300fa-c016-435f-9d98-325203486428-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "e01300fa-c016-435f-9d98-325203486428" (UID: "e01300fa-c016-435f-9d98-325203486428"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:32:04 crc kubenswrapper[4728]: I1205 11:32:04.175029 4728 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:04 crc kubenswrapper[4728]: I1205 11:32:04.175067 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rr22h\" (UniqueName: \"kubernetes.io/projected/bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e-kube-api-access-rr22h\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:04 crc kubenswrapper[4728]: I1205 11:32:04.175080 4728 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:04 crc kubenswrapper[4728]: I1205 11:32:04.175090 4728 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e01300fa-c016-435f-9d98-325203486428-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:04 crc kubenswrapper[4728]: I1205 11:32:04.175100 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f5xfg\" (UniqueName: \"kubernetes.io/projected/e01300fa-c016-435f-9d98-325203486428-kube-api-access-f5xfg\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:04 crc kubenswrapper[4728]: I1205 11:32:04.175110 4728 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e01300fa-c016-435f-9d98-325203486428-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:04 crc kubenswrapper[4728]: I1205 11:32:04.175119 4728 reconciler_common.go:293] 
"Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e01300fa-c016-435f-9d98-325203486428-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:04 crc kubenswrapper[4728]: I1205 11:32:04.175131 4728 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e01300fa-c016-435f-9d98-325203486428-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:04 crc kubenswrapper[4728]: I1205 11:32:04.175140 4728 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:04 crc kubenswrapper[4728]: I1205 11:32:04.179744 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e01300fa-c016-435f-9d98-325203486428-config" (OuterVolumeSpecName: "config") pod "e01300fa-c016-435f-9d98-325203486428" (UID: "e01300fa-c016-435f-9d98-325203486428"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:32:04 crc kubenswrapper[4728]: I1205 11:32:04.276578 4728 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e01300fa-c016-435f-9d98-325203486428-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:04 crc kubenswrapper[4728]: I1205 11:32:04.498007 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56696ff475-f9ztl" event={"ID":"e01300fa-c016-435f-9d98-325203486428","Type":"ContainerDied","Data":"cdcbfdbc00788ae5f33986021c4354c7a891a878f9d4385367a6367c78174faa"} Dec 05 11:32:04 crc kubenswrapper[4728]: I1205 11:32:04.498069 4728 scope.go:117] "RemoveContainer" containerID="3f84844352374fd98447dc00dc547415d652ffffbd1b426deb3aeb4a7fe50fda" Dec 05 11:32:04 crc kubenswrapper[4728]: I1205 11:32:04.498249 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56696ff475-f9ztl" Dec 05 11:32:04 crc kubenswrapper[4728]: I1205 11:32:04.504018 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-6x7gh" Dec 05 11:32:04 crc kubenswrapper[4728]: I1205 11:32:04.504071 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-6x7gh" event={"ID":"bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e","Type":"ContainerDied","Data":"a553d71a8926aedbaba0c389d168558a6020d430d67048a476bac541f88995c0"} Dec 05 11:32:04 crc kubenswrapper[4728]: I1205 11:32:04.504088 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a553d71a8926aedbaba0c389d168558a6020d430d67048a476bac541f88995c0" Dec 05 11:32:04 crc kubenswrapper[4728]: I1205 11:32:04.527838 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-56696ff475-f9ztl"] Dec 05 11:32:04 crc kubenswrapper[4728]: I1205 11:32:04.538205 4728 scope.go:117] "RemoveContainer" containerID="0a3838e0bbad998aeec7c54987def9d1fedf50726efc85b333d98e736b690973" Dec 05 11:32:04 crc kubenswrapper[4728]: I1205 11:32:04.539974 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-56696ff475-f9ztl"] Dec 05 11:32:04 crc kubenswrapper[4728]: I1205 11:32:04.683341 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 11:32:04 crc kubenswrapper[4728]: I1205 11:32:04.684392 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="76948b41-b61f-4f2b-af11-eff84f150dab" containerName="nova-api-log" containerID="cri-o://3a19ced693d59db18eae63a9d5a3e6e7675cd7102c04a8bc3e3d85719ea7a42e" gracePeriod=30 Dec 05 11:32:04 crc kubenswrapper[4728]: I1205 11:32:04.685072 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="76948b41-b61f-4f2b-af11-eff84f150dab" containerName="nova-api-api" containerID="cri-o://94934ec710944a1aaa2e39c0315e60c173dee33a92d4a98d3f432e7a94451cd0" gracePeriod=30 Dec 05 11:32:04 crc kubenswrapper[4728]: I1205 11:32:04.717201 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 11:32:04 crc kubenswrapper[4728]: I1205 11:32:04.717399 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="6e716093-e9b7-45db-b295-f7f423424f94" containerName="nova-metadata-log" containerID="cri-o://32ceab692018e1cf3dec3d182b7a6ccb1e7ebfe1569e5dd7f068d4c616d2fc62" gracePeriod=30 Dec 05 11:32:04 crc kubenswrapper[4728]: I1205 11:32:04.717807 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="6e716093-e9b7-45db-b295-f7f423424f94" containerName="nova-metadata-metadata" containerID="cri-o://32f78df62e80e9cbe6fa6fde1acebf6abe4c61082e0d88daa1960f3636c0931b" gracePeriod=30 Dec 05 11:32:04 crc kubenswrapper[4728]: I1205 11:32:04.731052 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 11:32:04 crc kubenswrapper[4728]: I1205 11:32:04.998427 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-k7g8m" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.194443 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2dr9z\" (UniqueName: \"kubernetes.io/projected/3a537dee-6a60-42e4-a928-c43d4db07171-kube-api-access-2dr9z\") pod \"3a537dee-6a60-42e4-a928-c43d4db07171\" (UID: \"3a537dee-6a60-42e4-a928-c43d4db07171\") " Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.194573 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3a537dee-6a60-42e4-a928-c43d4db07171-scripts\") pod \"3a537dee-6a60-42e4-a928-c43d4db07171\" (UID: \"3a537dee-6a60-42e4-a928-c43d4db07171\") " Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.194599 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a537dee-6a60-42e4-a928-c43d4db07171-combined-ca-bundle\") pod \"3a537dee-6a60-42e4-a928-c43d4db07171\" (UID: \"3a537dee-6a60-42e4-a928-c43d4db07171\") " Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.194726 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a537dee-6a60-42e4-a928-c43d4db07171-config-data\") pod \"3a537dee-6a60-42e4-a928-c43d4db07171\" (UID: \"3a537dee-6a60-42e4-a928-c43d4db07171\") " Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.200407 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3a537dee-6a60-42e4-a928-c43d4db07171-kube-api-access-2dr9z" (OuterVolumeSpecName: "kube-api-access-2dr9z") pod "3a537dee-6a60-42e4-a928-c43d4db07171" (UID: "3a537dee-6a60-42e4-a928-c43d4db07171"). InnerVolumeSpecName "kube-api-access-2dr9z". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.204722 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a537dee-6a60-42e4-a928-c43d4db07171-scripts" (OuterVolumeSpecName: "scripts") pod "3a537dee-6a60-42e4-a928-c43d4db07171" (UID: "3a537dee-6a60-42e4-a928-c43d4db07171"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.222468 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.228115 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a537dee-6a60-42e4-a928-c43d4db07171-config-data" (OuterVolumeSpecName: "config-data") pod "3a537dee-6a60-42e4-a928-c43d4db07171" (UID: "3a537dee-6a60-42e4-a928-c43d4db07171"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.258281 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a537dee-6a60-42e4-a928-c43d4db07171-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3a537dee-6a60-42e4-a928-c43d4db07171" (UID: "3a537dee-6a60-42e4-a928-c43d4db07171"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.297779 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2dr9z\" (UniqueName: \"kubernetes.io/projected/3a537dee-6a60-42e4-a928-c43d4db07171-kube-api-access-2dr9z\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.297842 4728 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3a537dee-6a60-42e4-a928-c43d4db07171-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.297856 4728 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a537dee-6a60-42e4-a928-c43d4db07171-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.297869 4728 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a537dee-6a60-42e4-a928-c43d4db07171-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.399249 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e716093-e9b7-45db-b295-f7f423424f94-config-data\") pod \"6e716093-e9b7-45db-b295-f7f423424f94\" (UID: \"6e716093-e9b7-45db-b295-f7f423424f94\") " Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.399540 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6e716093-e9b7-45db-b295-f7f423424f94-nova-metadata-tls-certs\") pod \"6e716093-e9b7-45db-b295-f7f423424f94\" (UID: \"6e716093-e9b7-45db-b295-f7f423424f94\") " Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.399711 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6e716093-e9b7-45db-b295-f7f423424f94-logs\") pod \"6e716093-e9b7-45db-b295-f7f423424f94\" (UID: \"6e716093-e9b7-45db-b295-f7f423424f94\") " Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.400180 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6e716093-e9b7-45db-b295-f7f423424f94-logs" (OuterVolumeSpecName: "logs") pod "6e716093-e9b7-45db-b295-f7f423424f94" (UID: "6e716093-e9b7-45db-b295-f7f423424f94"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.400377 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jbw5l\" (UniqueName: \"kubernetes.io/projected/6e716093-e9b7-45db-b295-f7f423424f94-kube-api-access-jbw5l\") pod \"6e716093-e9b7-45db-b295-f7f423424f94\" (UID: \"6e716093-e9b7-45db-b295-f7f423424f94\") " Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.400434 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e716093-e9b7-45db-b295-f7f423424f94-combined-ca-bundle\") pod \"6e716093-e9b7-45db-b295-f7f423424f94\" (UID: \"6e716093-e9b7-45db-b295-f7f423424f94\") " Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.401696 4728 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6e716093-e9b7-45db-b295-f7f423424f94-logs\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.406400 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e716093-e9b7-45db-b295-f7f423424f94-kube-api-access-jbw5l" (OuterVolumeSpecName: "kube-api-access-jbw5l") pod "6e716093-e9b7-45db-b295-f7f423424f94" (UID: "6e716093-e9b7-45db-b295-f7f423424f94"). InnerVolumeSpecName "kube-api-access-jbw5l". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.451966 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e716093-e9b7-45db-b295-f7f423424f94-config-data" (OuterVolumeSpecName: "config-data") pod "6e716093-e9b7-45db-b295-f7f423424f94" (UID: "6e716093-e9b7-45db-b295-f7f423424f94"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.460629 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e716093-e9b7-45db-b295-f7f423424f94-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6e716093-e9b7-45db-b295-f7f423424f94" (UID: "6e716093-e9b7-45db-b295-f7f423424f94"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.478864 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e716093-e9b7-45db-b295-f7f423424f94-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "6e716093-e9b7-45db-b295-f7f423424f94" (UID: "6e716093-e9b7-45db-b295-f7f423424f94"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.509120 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jbw5l\" (UniqueName: \"kubernetes.io/projected/6e716093-e9b7-45db-b295-f7f423424f94-kube-api-access-jbw5l\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.509226 4728 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e716093-e9b7-45db-b295-f7f423424f94-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.509307 4728 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e716093-e9b7-45db-b295-f7f423424f94-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.509467 4728 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6e716093-e9b7-45db-b295-f7f423424f94-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.517005 4728 generic.go:334] "Generic (PLEG): container finished" podID="6e716093-e9b7-45db-b295-f7f423424f94" containerID="32f78df62e80e9cbe6fa6fde1acebf6abe4c61082e0d88daa1960f3636c0931b" exitCode=0 Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.517043 4728 generic.go:334] "Generic (PLEG): container finished" podID="6e716093-e9b7-45db-b295-f7f423424f94" containerID="32ceab692018e1cf3dec3d182b7a6ccb1e7ebfe1569e5dd7f068d4c616d2fc62" exitCode=143 Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.517100 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6e716093-e9b7-45db-b295-f7f423424f94","Type":"ContainerDied","Data":"32f78df62e80e9cbe6fa6fde1acebf6abe4c61082e0d88daa1960f3636c0931b"} Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.517131 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6e716093-e9b7-45db-b295-f7f423424f94","Type":"ContainerDied","Data":"32ceab692018e1cf3dec3d182b7a6ccb1e7ebfe1569e5dd7f068d4c616d2fc62"} Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.517143 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6e716093-e9b7-45db-b295-f7f423424f94","Type":"ContainerDied","Data":"286066507b6822de15c3fca91203c168d683ad9a9e96d6a99e3da57561584110"} Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.517160 4728 scope.go:117] "RemoveContainer" containerID="32f78df62e80e9cbe6fa6fde1acebf6abe4c61082e0d88daa1960f3636c0931b" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.517273 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.531170 4728 generic.go:334] "Generic (PLEG): container finished" podID="76948b41-b61f-4f2b-af11-eff84f150dab" containerID="3a19ced693d59db18eae63a9d5a3e6e7675cd7102c04a8bc3e3d85719ea7a42e" exitCode=143 Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.531302 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"76948b41-b61f-4f2b-af11-eff84f150dab","Type":"ContainerDied","Data":"3a19ced693d59db18eae63a9d5a3e6e7675cd7102c04a8bc3e3d85719ea7a42e"} Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.534055 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-k7g8m" event={"ID":"3a537dee-6a60-42e4-a928-c43d4db07171","Type":"ContainerDied","Data":"b927b172899b171f1f0f39e3d55cce0459299c6697af925c8d724abab07f2232"} Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.534088 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-k7g8m" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.534106 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b927b172899b171f1f0f39e3d55cce0459299c6697af925c8d724abab07f2232" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.535319 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="7f559b48-333f-446d-bab9-177aa33286c2" containerName="nova-scheduler-scheduler" containerID="cri-o://7e0ccdaef1db79469cd2a084550d1d187b0086498785195074388dc934dfdebd" gracePeriod=30 Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.581412 4728 scope.go:117] "RemoveContainer" containerID="32ceab692018e1cf3dec3d182b7a6ccb1e7ebfe1569e5dd7f068d4c616d2fc62" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.598758 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.615571 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.623410 4728 scope.go:117] "RemoveContainer" containerID="32f78df62e80e9cbe6fa6fde1acebf6abe4c61082e0d88daa1960f3636c0931b" Dec 05 11:32:05 crc kubenswrapper[4728]: E1205 11:32:05.624469 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"32f78df62e80e9cbe6fa6fde1acebf6abe4c61082e0d88daa1960f3636c0931b\": container with ID starting with 32f78df62e80e9cbe6fa6fde1acebf6abe4c61082e0d88daa1960f3636c0931b not found: ID does not exist" containerID="32f78df62e80e9cbe6fa6fde1acebf6abe4c61082e0d88daa1960f3636c0931b" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.624678 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"32f78df62e80e9cbe6fa6fde1acebf6abe4c61082e0d88daa1960f3636c0931b"} err="failed to get container status \"32f78df62e80e9cbe6fa6fde1acebf6abe4c61082e0d88daa1960f3636c0931b\": rpc error: code = NotFound desc = could not find container \"32f78df62e80e9cbe6fa6fde1acebf6abe4c61082e0d88daa1960f3636c0931b\": container with ID starting with 32f78df62e80e9cbe6fa6fde1acebf6abe4c61082e0d88daa1960f3636c0931b not found: ID does not exist" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.624718 4728 scope.go:117] "RemoveContainer" 
containerID="32ceab692018e1cf3dec3d182b7a6ccb1e7ebfe1569e5dd7f068d4c616d2fc62" Dec 05 11:32:05 crc kubenswrapper[4728]: E1205 11:32:05.625784 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"32ceab692018e1cf3dec3d182b7a6ccb1e7ebfe1569e5dd7f068d4c616d2fc62\": container with ID starting with 32ceab692018e1cf3dec3d182b7a6ccb1e7ebfe1569e5dd7f068d4c616d2fc62 not found: ID does not exist" containerID="32ceab692018e1cf3dec3d182b7a6ccb1e7ebfe1569e5dd7f068d4c616d2fc62" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.625852 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"32ceab692018e1cf3dec3d182b7a6ccb1e7ebfe1569e5dd7f068d4c616d2fc62"} err="failed to get container status \"32ceab692018e1cf3dec3d182b7a6ccb1e7ebfe1569e5dd7f068d4c616d2fc62\": rpc error: code = NotFound desc = could not find container \"32ceab692018e1cf3dec3d182b7a6ccb1e7ebfe1569e5dd7f068d4c616d2fc62\": container with ID starting with 32ceab692018e1cf3dec3d182b7a6ccb1e7ebfe1569e5dd7f068d4c616d2fc62 not found: ID does not exist" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.625884 4728 scope.go:117] "RemoveContainer" containerID="32f78df62e80e9cbe6fa6fde1acebf6abe4c61082e0d88daa1960f3636c0931b" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.626155 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"32f78df62e80e9cbe6fa6fde1acebf6abe4c61082e0d88daa1960f3636c0931b"} err="failed to get container status \"32f78df62e80e9cbe6fa6fde1acebf6abe4c61082e0d88daa1960f3636c0931b\": rpc error: code = NotFound desc = could not find container \"32f78df62e80e9cbe6fa6fde1acebf6abe4c61082e0d88daa1960f3636c0931b\": container with ID starting with 32f78df62e80e9cbe6fa6fde1acebf6abe4c61082e0d88daa1960f3636c0931b not found: ID does not exist" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.626181 4728 scope.go:117] "RemoveContainer" containerID="32ceab692018e1cf3dec3d182b7a6ccb1e7ebfe1569e5dd7f068d4c616d2fc62" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.626388 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"32ceab692018e1cf3dec3d182b7a6ccb1e7ebfe1569e5dd7f068d4c616d2fc62"} err="failed to get container status \"32ceab692018e1cf3dec3d182b7a6ccb1e7ebfe1569e5dd7f068d4c616d2fc62\": rpc error: code = NotFound desc = could not find container \"32ceab692018e1cf3dec3d182b7a6ccb1e7ebfe1569e5dd7f068d4c616d2fc62\": container with ID starting with 32ceab692018e1cf3dec3d182b7a6ccb1e7ebfe1569e5dd7f068d4c616d2fc62 not found: ID does not exist" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.632239 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 05 11:32:05 crc kubenswrapper[4728]: E1205 11:32:05.632601 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e716093-e9b7-45db-b295-f7f423424f94" containerName="nova-metadata-log" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.632618 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e716093-e9b7-45db-b295-f7f423424f94" containerName="nova-metadata-log" Dec 05 11:32:05 crc kubenswrapper[4728]: E1205 11:32:05.632639 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a537dee-6a60-42e4-a928-c43d4db07171" containerName="nova-cell1-conductor-db-sync" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.632645 4728 state_mem.go:107] "Deleted 
CPUSet assignment" podUID="3a537dee-6a60-42e4-a928-c43d4db07171" containerName="nova-cell1-conductor-db-sync" Dec 05 11:32:05 crc kubenswrapper[4728]: E1205 11:32:05.632660 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e01300fa-c016-435f-9d98-325203486428" containerName="init" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.632668 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="e01300fa-c016-435f-9d98-325203486428" containerName="init" Dec 05 11:32:05 crc kubenswrapper[4728]: E1205 11:32:05.632683 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e01300fa-c016-435f-9d98-325203486428" containerName="dnsmasq-dns" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.632689 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="e01300fa-c016-435f-9d98-325203486428" containerName="dnsmasq-dns" Dec 05 11:32:05 crc kubenswrapper[4728]: E1205 11:32:05.632706 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e" containerName="nova-manage" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.632712 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e" containerName="nova-manage" Dec 05 11:32:05 crc kubenswrapper[4728]: E1205 11:32:05.632723 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e716093-e9b7-45db-b295-f7f423424f94" containerName="nova-metadata-metadata" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.632729 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e716093-e9b7-45db-b295-f7f423424f94" containerName="nova-metadata-metadata" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.632931 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e716093-e9b7-45db-b295-f7f423424f94" containerName="nova-metadata-log" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.632946 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="e01300fa-c016-435f-9d98-325203486428" containerName="dnsmasq-dns" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.632961 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e" containerName="nova-manage" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.632974 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e716093-e9b7-45db-b295-f7f423424f94" containerName="nova-metadata-metadata" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.632983 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a537dee-6a60-42e4-a928-c43d4db07171" containerName="nova-cell1-conductor-db-sync" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.633628 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.637263 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.642614 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.649959 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.658382 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.658524 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.660990 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.672866 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.713540 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/2bc84149-614a-4651-8686-eef191b3d230-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"2bc84149-614a-4651-8686-eef191b3d230\") " pod="openstack/nova-metadata-0" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.713633 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2bc84149-614a-4651-8686-eef191b3d230-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"2bc84149-614a-4651-8686-eef191b3d230\") " pod="openstack/nova-metadata-0" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.713666 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hj8b4\" (UniqueName: \"kubernetes.io/projected/50d65ab3-36a4-45da-bfbd-b66ff1541c6b-kube-api-access-hj8b4\") pod \"nova-cell1-conductor-0\" (UID: \"50d65ab3-36a4-45da-bfbd-b66ff1541c6b\") " pod="openstack/nova-cell1-conductor-0" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.713687 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hhrkv\" (UniqueName: \"kubernetes.io/projected/2bc84149-614a-4651-8686-eef191b3d230-kube-api-access-hhrkv\") pod \"nova-metadata-0\" (UID: \"2bc84149-614a-4651-8686-eef191b3d230\") " pod="openstack/nova-metadata-0" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.713872 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2bc84149-614a-4651-8686-eef191b3d230-logs\") pod \"nova-metadata-0\" (UID: \"2bc84149-614a-4651-8686-eef191b3d230\") " pod="openstack/nova-metadata-0" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.713994 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2bc84149-614a-4651-8686-eef191b3d230-config-data\") pod \"nova-metadata-0\" (UID: \"2bc84149-614a-4651-8686-eef191b3d230\") " pod="openstack/nova-metadata-0" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.714132 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50d65ab3-36a4-45da-bfbd-b66ff1541c6b-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"50d65ab3-36a4-45da-bfbd-b66ff1541c6b\") " pod="openstack/nova-cell1-conductor-0" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.714191 4728 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50d65ab3-36a4-45da-bfbd-b66ff1541c6b-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"50d65ab3-36a4-45da-bfbd-b66ff1541c6b\") " pod="openstack/nova-cell1-conductor-0" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.816155 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2bc84149-614a-4651-8686-eef191b3d230-config-data\") pod \"nova-metadata-0\" (UID: \"2bc84149-614a-4651-8686-eef191b3d230\") " pod="openstack/nova-metadata-0" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.816241 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50d65ab3-36a4-45da-bfbd-b66ff1541c6b-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"50d65ab3-36a4-45da-bfbd-b66ff1541c6b\") " pod="openstack/nova-cell1-conductor-0" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.816268 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50d65ab3-36a4-45da-bfbd-b66ff1541c6b-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"50d65ab3-36a4-45da-bfbd-b66ff1541c6b\") " pod="openstack/nova-cell1-conductor-0" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.816290 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/2bc84149-614a-4651-8686-eef191b3d230-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"2bc84149-614a-4651-8686-eef191b3d230\") " pod="openstack/nova-metadata-0" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.816340 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2bc84149-614a-4651-8686-eef191b3d230-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"2bc84149-614a-4651-8686-eef191b3d230\") " pod="openstack/nova-metadata-0" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.816367 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hj8b4\" (UniqueName: \"kubernetes.io/projected/50d65ab3-36a4-45da-bfbd-b66ff1541c6b-kube-api-access-hj8b4\") pod \"nova-cell1-conductor-0\" (UID: \"50d65ab3-36a4-45da-bfbd-b66ff1541c6b\") " pod="openstack/nova-cell1-conductor-0" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.817350 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hhrkv\" (UniqueName: \"kubernetes.io/projected/2bc84149-614a-4651-8686-eef191b3d230-kube-api-access-hhrkv\") pod \"nova-metadata-0\" (UID: \"2bc84149-614a-4651-8686-eef191b3d230\") " pod="openstack/nova-metadata-0" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.817622 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2bc84149-614a-4651-8686-eef191b3d230-logs\") pod \"nova-metadata-0\" (UID: \"2bc84149-614a-4651-8686-eef191b3d230\") " pod="openstack/nova-metadata-0" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.818398 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2bc84149-614a-4651-8686-eef191b3d230-logs\") pod \"nova-metadata-0\" (UID: 
\"2bc84149-614a-4651-8686-eef191b3d230\") " pod="openstack/nova-metadata-0" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.820073 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2bc84149-614a-4651-8686-eef191b3d230-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"2bc84149-614a-4651-8686-eef191b3d230\") " pod="openstack/nova-metadata-0" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.820194 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2bc84149-614a-4651-8686-eef191b3d230-config-data\") pod \"nova-metadata-0\" (UID: \"2bc84149-614a-4651-8686-eef191b3d230\") " pod="openstack/nova-metadata-0" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.822432 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50d65ab3-36a4-45da-bfbd-b66ff1541c6b-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"50d65ab3-36a4-45da-bfbd-b66ff1541c6b\") " pod="openstack/nova-cell1-conductor-0" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.822987 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/2bc84149-614a-4651-8686-eef191b3d230-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"2bc84149-614a-4651-8686-eef191b3d230\") " pod="openstack/nova-metadata-0" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.823930 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50d65ab3-36a4-45da-bfbd-b66ff1541c6b-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"50d65ab3-36a4-45da-bfbd-b66ff1541c6b\") " pod="openstack/nova-cell1-conductor-0" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.838867 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hhrkv\" (UniqueName: \"kubernetes.io/projected/2bc84149-614a-4651-8686-eef191b3d230-kube-api-access-hhrkv\") pod \"nova-metadata-0\" (UID: \"2bc84149-614a-4651-8686-eef191b3d230\") " pod="openstack/nova-metadata-0" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.839214 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hj8b4\" (UniqueName: \"kubernetes.io/projected/50d65ab3-36a4-45da-bfbd-b66ff1541c6b-kube-api-access-hj8b4\") pod \"nova-cell1-conductor-0\" (UID: \"50d65ab3-36a4-45da-bfbd-b66ff1541c6b\") " pod="openstack/nova-cell1-conductor-0" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.951196 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 05 11:32:05 crc kubenswrapper[4728]: I1205 11:32:05.967487 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 11:32:06 crc kubenswrapper[4728]: I1205 11:32:06.367674 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6e716093-e9b7-45db-b295-f7f423424f94" path="/var/lib/kubelet/pods/6e716093-e9b7-45db-b295-f7f423424f94/volumes" Dec 05 11:32:06 crc kubenswrapper[4728]: I1205 11:32:06.368558 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e01300fa-c016-435f-9d98-325203486428" path="/var/lib/kubelet/pods/e01300fa-c016-435f-9d98-325203486428/volumes" Dec 05 11:32:06 crc kubenswrapper[4728]: I1205 11:32:06.429261 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 05 11:32:06 crc kubenswrapper[4728]: I1205 11:32:06.525747 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 11:32:06 crc kubenswrapper[4728]: W1205 11:32:06.531840 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2bc84149_614a_4651_8686_eef191b3d230.slice/crio-16d265493882a124401d1787895f97609d0dd76722e0b36f1d9731d763de2a84 WatchSource:0}: Error finding container 16d265493882a124401d1787895f97609d0dd76722e0b36f1d9731d763de2a84: Status 404 returned error can't find the container with id 16d265493882a124401d1787895f97609d0dd76722e0b36f1d9731d763de2a84 Dec 05 11:32:06 crc kubenswrapper[4728]: I1205 11:32:06.551600 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"2bc84149-614a-4651-8686-eef191b3d230","Type":"ContainerStarted","Data":"16d265493882a124401d1787895f97609d0dd76722e0b36f1d9731d763de2a84"} Dec 05 11:32:06 crc kubenswrapper[4728]: I1205 11:32:06.554086 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"50d65ab3-36a4-45da-bfbd-b66ff1541c6b","Type":"ContainerStarted","Data":"19251f080246e41cebf7e6b04662bd924106eb7766b85f0c7efa5ed031945a19"} Dec 05 11:32:07 crc kubenswrapper[4728]: I1205 11:32:07.569840 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"50d65ab3-36a4-45da-bfbd-b66ff1541c6b","Type":"ContainerStarted","Data":"38d2be8747968a60f811ddad8f70d6b8d7f6cd7f02b5f17d326f64a410322dc0"} Dec 05 11:32:07 crc kubenswrapper[4728]: I1205 11:32:07.570204 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Dec 05 11:32:07 crc kubenswrapper[4728]: I1205 11:32:07.575485 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"2bc84149-614a-4651-8686-eef191b3d230","Type":"ContainerStarted","Data":"6e8b620f0f9355b6539cdd24729553d33de80c9b836a57aa2bad7ca015bb9b0c"} Dec 05 11:32:07 crc kubenswrapper[4728]: I1205 11:32:07.575521 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"2bc84149-614a-4651-8686-eef191b3d230","Type":"ContainerStarted","Data":"fc6f9217dda8df35da72d437bf3709010ced651b32f8bd424c6c3214e33ee9af"} Dec 05 11:32:07 crc kubenswrapper[4728]: I1205 11:32:07.593772 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.593753261 podStartE2EDuration="2.593753261s" podCreationTimestamp="2025-12-05 11:32:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:32:07.593194625 
+0000 UTC m=+1461.735317318" watchObservedRunningTime="2025-12-05 11:32:07.593753261 +0000 UTC m=+1461.735875954"
Dec 05 11:32:07 crc kubenswrapper[4728]: I1205 11:32:07.615546 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.615523885 podStartE2EDuration="2.615523885s" podCreationTimestamp="2025-12-05 11:32:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:32:07.609897062 +0000 UTC m=+1461.752019765" watchObservedRunningTime="2025-12-05 11:32:07.615523885 +0000 UTC m=+1461.757646658"
Dec 05 11:32:08 crc kubenswrapper[4728]: E1205 11:32:08.123190 4728 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7e0ccdaef1db79469cd2a084550d1d187b0086498785195074388dc934dfdebd" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Dec 05 11:32:08 crc kubenswrapper[4728]: E1205 11:32:08.125390 4728 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7e0ccdaef1db79469cd2a084550d1d187b0086498785195074388dc934dfdebd" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Dec 05 11:32:08 crc kubenswrapper[4728]: E1205 11:32:08.127962 4728 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7e0ccdaef1db79469cd2a084550d1d187b0086498785195074388dc934dfdebd" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Dec 05 11:32:08 crc kubenswrapper[4728]: E1205 11:32:08.128013 4728 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="7f559b48-333f-446d-bab9-177aa33286c2" containerName="nova-scheduler-scheduler"
Dec 05 11:32:09 crc kubenswrapper[4728]: I1205 11:32:09.394490 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0"
Dec 05 11:32:09 crc kubenswrapper[4728]: I1205 11:32:09.609280 4728 generic.go:334] "Generic (PLEG): container finished" podID="76948b41-b61f-4f2b-af11-eff84f150dab" containerID="94934ec710944a1aaa2e39c0315e60c173dee33a92d4a98d3f432e7a94451cd0" exitCode=0
Dec 05 11:32:09 crc kubenswrapper[4728]: I1205 11:32:09.609340 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"76948b41-b61f-4f2b-af11-eff84f150dab","Type":"ContainerDied","Data":"94934ec710944a1aaa2e39c0315e60c173dee33a92d4a98d3f432e7a94451cd0"}
Dec 05 11:32:09 crc kubenswrapper[4728]: I1205 11:32:09.609365 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"76948b41-b61f-4f2b-af11-eff84f150dab","Type":"ContainerDied","Data":"c3d53ff7addae6f5ad68338e37016e54bd9c2a21b08866f681174f6c8d4133eb"}
Dec 05 11:32:09 crc kubenswrapper[4728]: I1205 11:32:09.609376 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c3d53ff7addae6f5ad68338e37016e54bd9c2a21b08866f681174f6c8d4133eb"
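[annotation] The three ExecSync failures above show the other probe flavor: nova-scheduler-0's readiness probe execs /usr/bin/pgrep -r DRST nova-scheduler inside the container, and because the container is already stopping, the runtime refuses to register a new exec PID, so the probe errors ("Probe errored") rather than merely failing. A local Go sketch of exec-probe semantics under the assumption that a nonzero exit or start failure means not-ready; running the command outside the container is purely illustrative.

package main

import (
	"context"
	"fmt"
	"os/exec"
	"time"
)

// execProbe runs a command and reports any start failure or nonzero exit
// as an error, mirroring how an exec probe result is interpreted.
func execProbe(ctx context.Context, name string, args ...string) error {
	return exec.CommandContext(ctx, name, args...).Run()
}

func main() {
	ctx, cancel := context.WithTimeout(context.Background(), time.Second)
	defer cancel()
	// The command is taken verbatim from the log's probe spec; per the log,
	// once the container is stopping the runtime rejects the exec outright.
	if err := execProbe(ctx, "/usr/bin/pgrep", "-r", "DRST", "nova-scheduler"); err != nil {
		fmt.Println("Probe errored:", err)
	} else {
		fmt.Println("ready")
	}
}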
for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 11:32:09 crc kubenswrapper[4728]: I1205 11:32:09.707278 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76948b41-b61f-4f2b-af11-eff84f150dab-config-data\") pod \"76948b41-b61f-4f2b-af11-eff84f150dab\" (UID: \"76948b41-b61f-4f2b-af11-eff84f150dab\") " Dec 05 11:32:09 crc kubenswrapper[4728]: I1205 11:32:09.707703 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhpvh\" (UniqueName: \"kubernetes.io/projected/76948b41-b61f-4f2b-af11-eff84f150dab-kube-api-access-jhpvh\") pod \"76948b41-b61f-4f2b-af11-eff84f150dab\" (UID: \"76948b41-b61f-4f2b-af11-eff84f150dab\") " Dec 05 11:32:09 crc kubenswrapper[4728]: I1205 11:32:09.707872 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/76948b41-b61f-4f2b-af11-eff84f150dab-logs\") pod \"76948b41-b61f-4f2b-af11-eff84f150dab\" (UID: \"76948b41-b61f-4f2b-af11-eff84f150dab\") " Dec 05 11:32:09 crc kubenswrapper[4728]: I1205 11:32:09.708064 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76948b41-b61f-4f2b-af11-eff84f150dab-combined-ca-bundle\") pod \"76948b41-b61f-4f2b-af11-eff84f150dab\" (UID: \"76948b41-b61f-4f2b-af11-eff84f150dab\") " Dec 05 11:32:09 crc kubenswrapper[4728]: I1205 11:32:09.709090 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/76948b41-b61f-4f2b-af11-eff84f150dab-logs" (OuterVolumeSpecName: "logs") pod "76948b41-b61f-4f2b-af11-eff84f150dab" (UID: "76948b41-b61f-4f2b-af11-eff84f150dab"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:32:09 crc kubenswrapper[4728]: I1205 11:32:09.721039 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/76948b41-b61f-4f2b-af11-eff84f150dab-kube-api-access-jhpvh" (OuterVolumeSpecName: "kube-api-access-jhpvh") pod "76948b41-b61f-4f2b-af11-eff84f150dab" (UID: "76948b41-b61f-4f2b-af11-eff84f150dab"). InnerVolumeSpecName "kube-api-access-jhpvh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:32:09 crc kubenswrapper[4728]: I1205 11:32:09.739194 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76948b41-b61f-4f2b-af11-eff84f150dab-config-data" (OuterVolumeSpecName: "config-data") pod "76948b41-b61f-4f2b-af11-eff84f150dab" (UID: "76948b41-b61f-4f2b-af11-eff84f150dab"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:32:09 crc kubenswrapper[4728]: I1205 11:32:09.746817 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76948b41-b61f-4f2b-af11-eff84f150dab-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "76948b41-b61f-4f2b-af11-eff84f150dab" (UID: "76948b41-b61f-4f2b-af11-eff84f150dab"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:32:09 crc kubenswrapper[4728]: I1205 11:32:09.810596 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhpvh\" (UniqueName: \"kubernetes.io/projected/76948b41-b61f-4f2b-af11-eff84f150dab-kube-api-access-jhpvh\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:09 crc kubenswrapper[4728]: I1205 11:32:09.810635 4728 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/76948b41-b61f-4f2b-af11-eff84f150dab-logs\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:09 crc kubenswrapper[4728]: I1205 11:32:09.810645 4728 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76948b41-b61f-4f2b-af11-eff84f150dab-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:09 crc kubenswrapper[4728]: I1205 11:32:09.810654 4728 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76948b41-b61f-4f2b-af11-eff84f150dab-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:10 crc kubenswrapper[4728]: E1205 11:32:10.045429 4728 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7f559b48_333f_446d_bab9_177aa33286c2.slice/crio-7e0ccdaef1db79469cd2a084550d1d187b0086498785195074388dc934dfdebd.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7f559b48_333f_446d_bab9_177aa33286c2.slice/crio-conmon-7e0ccdaef1db79469cd2a084550d1d187b0086498785195074388dc934dfdebd.scope\": RecentStats: unable to find data in memory cache]" Dec 05 11:32:10 crc kubenswrapper[4728]: I1205 11:32:10.296440 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 11:32:10 crc kubenswrapper[4728]: I1205 11:32:10.318541 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vqqgv\" (UniqueName: \"kubernetes.io/projected/7f559b48-333f-446d-bab9-177aa33286c2-kube-api-access-vqqgv\") pod \"7f559b48-333f-446d-bab9-177aa33286c2\" (UID: \"7f559b48-333f-446d-bab9-177aa33286c2\") " Dec 05 11:32:10 crc kubenswrapper[4728]: I1205 11:32:10.318619 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f559b48-333f-446d-bab9-177aa33286c2-combined-ca-bundle\") pod \"7f559b48-333f-446d-bab9-177aa33286c2\" (UID: \"7f559b48-333f-446d-bab9-177aa33286c2\") " Dec 05 11:32:10 crc kubenswrapper[4728]: I1205 11:32:10.318664 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f559b48-333f-446d-bab9-177aa33286c2-config-data\") pod \"7f559b48-333f-446d-bab9-177aa33286c2\" (UID: \"7f559b48-333f-446d-bab9-177aa33286c2\") " Dec 05 11:32:10 crc kubenswrapper[4728]: I1205 11:32:10.324118 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7f559b48-333f-446d-bab9-177aa33286c2-kube-api-access-vqqgv" (OuterVolumeSpecName: "kube-api-access-vqqgv") pod "7f559b48-333f-446d-bab9-177aa33286c2" (UID: "7f559b48-333f-446d-bab9-177aa33286c2"). InnerVolumeSpecName "kube-api-access-vqqgv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:32:10 crc kubenswrapper[4728]: I1205 11:32:10.348202 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f559b48-333f-446d-bab9-177aa33286c2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7f559b48-333f-446d-bab9-177aa33286c2" (UID: "7f559b48-333f-446d-bab9-177aa33286c2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:32:10 crc kubenswrapper[4728]: I1205 11:32:10.377527 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f559b48-333f-446d-bab9-177aa33286c2-config-data" (OuterVolumeSpecName: "config-data") pod "7f559b48-333f-446d-bab9-177aa33286c2" (UID: "7f559b48-333f-446d-bab9-177aa33286c2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:32:10 crc kubenswrapper[4728]: I1205 11:32:10.421108 4728 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f559b48-333f-446d-bab9-177aa33286c2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:10 crc kubenswrapper[4728]: I1205 11:32:10.421144 4728 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f559b48-333f-446d-bab9-177aa33286c2-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:10 crc kubenswrapper[4728]: I1205 11:32:10.421157 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vqqgv\" (UniqueName: \"kubernetes.io/projected/7f559b48-333f-446d-bab9-177aa33286c2-kube-api-access-vqqgv\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:10 crc kubenswrapper[4728]: I1205 11:32:10.621056 4728 generic.go:334] "Generic (PLEG): container finished" podID="7f559b48-333f-446d-bab9-177aa33286c2" containerID="7e0ccdaef1db79469cd2a084550d1d187b0086498785195074388dc934dfdebd" exitCode=0 Dec 05 11:32:10 crc kubenswrapper[4728]: I1205 11:32:10.621229 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 11:32:10 crc kubenswrapper[4728]: I1205 11:32:10.621384 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 11:32:10 crc kubenswrapper[4728]: I1205 11:32:10.621288 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"7f559b48-333f-446d-bab9-177aa33286c2","Type":"ContainerDied","Data":"7e0ccdaef1db79469cd2a084550d1d187b0086498785195074388dc934dfdebd"} Dec 05 11:32:10 crc kubenswrapper[4728]: I1205 11:32:10.621433 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"7f559b48-333f-446d-bab9-177aa33286c2","Type":"ContainerDied","Data":"fbdfd0ee232860dda892e17c45c9a66839f517f7608a506138df051a4b51d37e"} Dec 05 11:32:10 crc kubenswrapper[4728]: I1205 11:32:10.621451 4728 scope.go:117] "RemoveContainer" containerID="7e0ccdaef1db79469cd2a084550d1d187b0086498785195074388dc934dfdebd" Dec 05 11:32:10 crc kubenswrapper[4728]: I1205 11:32:10.657333 4728 scope.go:117] "RemoveContainer" containerID="7e0ccdaef1db79469cd2a084550d1d187b0086498785195074388dc934dfdebd" Dec 05 11:32:10 crc kubenswrapper[4728]: I1205 11:32:10.659599 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 11:32:10 crc kubenswrapper[4728]: E1205 11:32:10.660000 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7e0ccdaef1db79469cd2a084550d1d187b0086498785195074388dc934dfdebd\": container with ID starting with 7e0ccdaef1db79469cd2a084550d1d187b0086498785195074388dc934dfdebd not found: ID does not exist" containerID="7e0ccdaef1db79469cd2a084550d1d187b0086498785195074388dc934dfdebd" Dec 05 11:32:10 crc kubenswrapper[4728]: I1205 11:32:10.660061 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7e0ccdaef1db79469cd2a084550d1d187b0086498785195074388dc934dfdebd"} err="failed to get container status \"7e0ccdaef1db79469cd2a084550d1d187b0086498785195074388dc934dfdebd\": rpc error: code = NotFound desc = could not find container \"7e0ccdaef1db79469cd2a084550d1d187b0086498785195074388dc934dfdebd\": container with ID starting with 7e0ccdaef1db79469cd2a084550d1d187b0086498785195074388dc934dfdebd not found: ID does not exist" Dec 05 11:32:10 crc kubenswrapper[4728]: I1205 11:32:10.676240 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 05 11:32:10 crc kubenswrapper[4728]: I1205 11:32:10.697296 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 11:32:10 crc kubenswrapper[4728]: I1205 11:32:10.710602 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 11:32:10 crc kubenswrapper[4728]: I1205 11:32:10.720798 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 05 11:32:10 crc kubenswrapper[4728]: E1205 11:32:10.721251 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76948b41-b61f-4f2b-af11-eff84f150dab" containerName="nova-api-api" Dec 05 11:32:10 crc kubenswrapper[4728]: I1205 11:32:10.721264 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="76948b41-b61f-4f2b-af11-eff84f150dab" containerName="nova-api-api" Dec 05 11:32:10 crc kubenswrapper[4728]: E1205 11:32:10.721278 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f559b48-333f-446d-bab9-177aa33286c2" containerName="nova-scheduler-scheduler" Dec 05 11:32:10 crc kubenswrapper[4728]: I1205 11:32:10.721284 4728 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="7f559b48-333f-446d-bab9-177aa33286c2" containerName="nova-scheduler-scheduler" Dec 05 11:32:10 crc kubenswrapper[4728]: E1205 11:32:10.721298 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76948b41-b61f-4f2b-af11-eff84f150dab" containerName="nova-api-log" Dec 05 11:32:10 crc kubenswrapper[4728]: I1205 11:32:10.721303 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="76948b41-b61f-4f2b-af11-eff84f150dab" containerName="nova-api-log" Dec 05 11:32:10 crc kubenswrapper[4728]: I1205 11:32:10.721463 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f559b48-333f-446d-bab9-177aa33286c2" containerName="nova-scheduler-scheduler" Dec 05 11:32:10 crc kubenswrapper[4728]: I1205 11:32:10.721474 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="76948b41-b61f-4f2b-af11-eff84f150dab" containerName="nova-api-log" Dec 05 11:32:10 crc kubenswrapper[4728]: I1205 11:32:10.721487 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="76948b41-b61f-4f2b-af11-eff84f150dab" containerName="nova-api-api" Dec 05 11:32:10 crc kubenswrapper[4728]: I1205 11:32:10.722491 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 11:32:10 crc kubenswrapper[4728]: I1205 11:32:10.728503 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc7732bf-7470-4243-9533-74f7432791bd-logs\") pod \"nova-api-0\" (UID: \"dc7732bf-7470-4243-9533-74f7432791bd\") " pod="openstack/nova-api-0" Dec 05 11:32:10 crc kubenswrapper[4728]: I1205 11:32:10.729164 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rd22g\" (UniqueName: \"kubernetes.io/projected/dc7732bf-7470-4243-9533-74f7432791bd-kube-api-access-rd22g\") pod \"nova-api-0\" (UID: \"dc7732bf-7470-4243-9533-74f7432791bd\") " pod="openstack/nova-api-0" Dec 05 11:32:10 crc kubenswrapper[4728]: I1205 11:32:10.729317 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc7732bf-7470-4243-9533-74f7432791bd-config-data\") pod \"nova-api-0\" (UID: \"dc7732bf-7470-4243-9533-74f7432791bd\") " pod="openstack/nova-api-0" Dec 05 11:32:10 crc kubenswrapper[4728]: I1205 11:32:10.732721 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc7732bf-7470-4243-9533-74f7432791bd-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"dc7732bf-7470-4243-9533-74f7432791bd\") " pod="openstack/nova-api-0" Dec 05 11:32:10 crc kubenswrapper[4728]: I1205 11:32:10.734308 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 05 11:32:10 crc kubenswrapper[4728]: I1205 11:32:10.736014 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 11:32:10 crc kubenswrapper[4728]: I1205 11:32:10.743345 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 11:32:10 crc kubenswrapper[4728]: I1205 11:32:10.747206 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 05 11:32:10 crc kubenswrapper[4728]: I1205 11:32:10.767765 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 11:32:10 crc kubenswrapper[4728]: I1205 11:32:10.790983 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 11:32:10 crc kubenswrapper[4728]: I1205 11:32:10.834418 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ec1f4a3-260f-4d09-929c-1c625845b8fa-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"8ec1f4a3-260f-4d09-929c-1c625845b8fa\") " pod="openstack/nova-scheduler-0" Dec 05 11:32:10 crc kubenswrapper[4728]: I1205 11:32:10.834471 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ec1f4a3-260f-4d09-929c-1c625845b8fa-config-data\") pod \"nova-scheduler-0\" (UID: \"8ec1f4a3-260f-4d09-929c-1c625845b8fa\") " pod="openstack/nova-scheduler-0" Dec 05 11:32:10 crc kubenswrapper[4728]: I1205 11:32:10.834597 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d2r7n\" (UniqueName: \"kubernetes.io/projected/8ec1f4a3-260f-4d09-929c-1c625845b8fa-kube-api-access-d2r7n\") pod \"nova-scheduler-0\" (UID: \"8ec1f4a3-260f-4d09-929c-1c625845b8fa\") " pod="openstack/nova-scheduler-0" Dec 05 11:32:10 crc kubenswrapper[4728]: I1205 11:32:10.834634 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rd22g\" (UniqueName: \"kubernetes.io/projected/dc7732bf-7470-4243-9533-74f7432791bd-kube-api-access-rd22g\") pod \"nova-api-0\" (UID: \"dc7732bf-7470-4243-9533-74f7432791bd\") " pod="openstack/nova-api-0" Dec 05 11:32:10 crc kubenswrapper[4728]: I1205 11:32:10.834668 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc7732bf-7470-4243-9533-74f7432791bd-config-data\") pod \"nova-api-0\" (UID: \"dc7732bf-7470-4243-9533-74f7432791bd\") " pod="openstack/nova-api-0" Dec 05 11:32:10 crc kubenswrapper[4728]: I1205 11:32:10.834747 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc7732bf-7470-4243-9533-74f7432791bd-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"dc7732bf-7470-4243-9533-74f7432791bd\") " pod="openstack/nova-api-0" Dec 05 11:32:10 crc kubenswrapper[4728]: I1205 11:32:10.834815 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc7732bf-7470-4243-9533-74f7432791bd-logs\") pod \"nova-api-0\" (UID: \"dc7732bf-7470-4243-9533-74f7432791bd\") " pod="openstack/nova-api-0" Dec 05 11:32:10 crc kubenswrapper[4728]: I1205 11:32:10.835211 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc7732bf-7470-4243-9533-74f7432791bd-logs\") pod \"nova-api-0\" (UID: \"dc7732bf-7470-4243-9533-74f7432791bd\") " pod="openstack/nova-api-0" Dec 05 11:32:10 crc kubenswrapper[4728]: I1205 11:32:10.838849 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/dc7732bf-7470-4243-9533-74f7432791bd-config-data\") pod \"nova-api-0\" (UID: \"dc7732bf-7470-4243-9533-74f7432791bd\") " pod="openstack/nova-api-0" Dec 05 11:32:10 crc kubenswrapper[4728]: I1205 11:32:10.839699 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc7732bf-7470-4243-9533-74f7432791bd-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"dc7732bf-7470-4243-9533-74f7432791bd\") " pod="openstack/nova-api-0" Dec 05 11:32:10 crc kubenswrapper[4728]: I1205 11:32:10.850515 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rd22g\" (UniqueName: \"kubernetes.io/projected/dc7732bf-7470-4243-9533-74f7432791bd-kube-api-access-rd22g\") pod \"nova-api-0\" (UID: \"dc7732bf-7470-4243-9533-74f7432791bd\") " pod="openstack/nova-api-0" Dec 05 11:32:10 crc kubenswrapper[4728]: I1205 11:32:10.936988 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d2r7n\" (UniqueName: \"kubernetes.io/projected/8ec1f4a3-260f-4d09-929c-1c625845b8fa-kube-api-access-d2r7n\") pod \"nova-scheduler-0\" (UID: \"8ec1f4a3-260f-4d09-929c-1c625845b8fa\") " pod="openstack/nova-scheduler-0" Dec 05 11:32:10 crc kubenswrapper[4728]: I1205 11:32:10.937108 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ec1f4a3-260f-4d09-929c-1c625845b8fa-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"8ec1f4a3-260f-4d09-929c-1c625845b8fa\") " pod="openstack/nova-scheduler-0" Dec 05 11:32:10 crc kubenswrapper[4728]: I1205 11:32:10.937134 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ec1f4a3-260f-4d09-929c-1c625845b8fa-config-data\") pod \"nova-scheduler-0\" (UID: \"8ec1f4a3-260f-4d09-929c-1c625845b8fa\") " pod="openstack/nova-scheduler-0" Dec 05 11:32:10 crc kubenswrapper[4728]: I1205 11:32:10.940697 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ec1f4a3-260f-4d09-929c-1c625845b8fa-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"8ec1f4a3-260f-4d09-929c-1c625845b8fa\") " pod="openstack/nova-scheduler-0" Dec 05 11:32:10 crc kubenswrapper[4728]: I1205 11:32:10.941527 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ec1f4a3-260f-4d09-929c-1c625845b8fa-config-data\") pod \"nova-scheduler-0\" (UID: \"8ec1f4a3-260f-4d09-929c-1c625845b8fa\") " pod="openstack/nova-scheduler-0" Dec 05 11:32:10 crc kubenswrapper[4728]: I1205 11:32:10.954115 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d2r7n\" (UniqueName: \"kubernetes.io/projected/8ec1f4a3-260f-4d09-929c-1c625845b8fa-kube-api-access-d2r7n\") pod \"nova-scheduler-0\" (UID: \"8ec1f4a3-260f-4d09-929c-1c625845b8fa\") " pod="openstack/nova-scheduler-0" Dec 05 11:32:10 crc kubenswrapper[4728]: I1205 11:32:10.967560 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 11:32:10 crc kubenswrapper[4728]: I1205 11:32:10.967699 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 11:32:11 crc kubenswrapper[4728]: I1205 11:32:11.053870 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 11:32:11 crc kubenswrapper[4728]: I1205 11:32:11.072237 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 11:32:11 crc kubenswrapper[4728]: W1205 11:32:11.711192 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddc7732bf_7470_4243_9533_74f7432791bd.slice/crio-88fa5c91725a9c24d38f311307fda0d5e5716019e8cea9bf8491c755a3742548 WatchSource:0}: Error finding container 88fa5c91725a9c24d38f311307fda0d5e5716019e8cea9bf8491c755a3742548: Status 404 returned error can't find the container with id 88fa5c91725a9c24d38f311307fda0d5e5716019e8cea9bf8491c755a3742548 Dec 05 11:32:11 crc kubenswrapper[4728]: I1205 11:32:11.717788 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 11:32:11 crc kubenswrapper[4728]: I1205 11:32:11.776852 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 11:32:11 crc kubenswrapper[4728]: W1205 11:32:11.792440 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8ec1f4a3_260f_4d09_929c_1c625845b8fa.slice/crio-b201dd0f9b43a912e1c74615332d251789eb067754be913cae16e067895dd919 WatchSource:0}: Error finding container b201dd0f9b43a912e1c74615332d251789eb067754be913cae16e067895dd919: Status 404 returned error can't find the container with id b201dd0f9b43a912e1c74615332d251789eb067754be913cae16e067895dd919 Dec 05 11:32:12 crc kubenswrapper[4728]: I1205 11:32:12.369252 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="76948b41-b61f-4f2b-af11-eff84f150dab" path="/var/lib/kubelet/pods/76948b41-b61f-4f2b-af11-eff84f150dab/volumes" Dec 05 11:32:12 crc kubenswrapper[4728]: I1205 11:32:12.370388 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7f559b48-333f-446d-bab9-177aa33286c2" path="/var/lib/kubelet/pods/7f559b48-333f-446d-bab9-177aa33286c2/volumes" Dec 05 11:32:12 crc kubenswrapper[4728]: I1205 11:32:12.645209 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"dc7732bf-7470-4243-9533-74f7432791bd","Type":"ContainerStarted","Data":"8bc04f097276751610c57aa8179f94d8e053b759519414068b4daa13930eeb62"} Dec 05 11:32:12 crc kubenswrapper[4728]: I1205 11:32:12.645257 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"dc7732bf-7470-4243-9533-74f7432791bd","Type":"ContainerStarted","Data":"1bb602c7e5525145caed4b735c55e4bd7510c4f1cc2f611bd78414ca5509967c"} Dec 05 11:32:12 crc kubenswrapper[4728]: I1205 11:32:12.645269 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"dc7732bf-7470-4243-9533-74f7432791bd","Type":"ContainerStarted","Data":"88fa5c91725a9c24d38f311307fda0d5e5716019e8cea9bf8491c755a3742548"} Dec 05 11:32:12 crc kubenswrapper[4728]: I1205 11:32:12.647547 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8ec1f4a3-260f-4d09-929c-1c625845b8fa","Type":"ContainerStarted","Data":"0f7a2586e480b180959bac03629689913722dfbce1480cb4e037f8a7c5044505"} Dec 05 11:32:12 crc kubenswrapper[4728]: I1205 11:32:12.647584 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" 
event={"ID":"8ec1f4a3-260f-4d09-929c-1c625845b8fa","Type":"ContainerStarted","Data":"b201dd0f9b43a912e1c74615332d251789eb067754be913cae16e067895dd919"} Dec 05 11:32:12 crc kubenswrapper[4728]: I1205 11:32:12.670744 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.670719357 podStartE2EDuration="2.670719357s" podCreationTimestamp="2025-12-05 11:32:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:32:12.669252617 +0000 UTC m=+1466.811375320" watchObservedRunningTime="2025-12-05 11:32:12.670719357 +0000 UTC m=+1466.812842050" Dec 05 11:32:12 crc kubenswrapper[4728]: I1205 11:32:12.714270 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.714248896 podStartE2EDuration="2.714248896s" podCreationTimestamp="2025-12-05 11:32:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:32:12.700310726 +0000 UTC m=+1466.842433439" watchObservedRunningTime="2025-12-05 11:32:12.714248896 +0000 UTC m=+1466.856371599" Dec 05 11:32:13 crc kubenswrapper[4728]: I1205 11:32:13.198437 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 11:32:13 crc kubenswrapper[4728]: I1205 11:32:13.198895 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="d32b489f-b040-4f20-badc-ef587eeb0960" containerName="kube-state-metrics" containerID="cri-o://387774b1662c767fedb047814be1a290942b34641a3dc916766205c410ad3532" gracePeriod=30 Dec 05 11:32:13 crc kubenswrapper[4728]: I1205 11:32:13.664963 4728 generic.go:334] "Generic (PLEG): container finished" podID="d32b489f-b040-4f20-badc-ef587eeb0960" containerID="387774b1662c767fedb047814be1a290942b34641a3dc916766205c410ad3532" exitCode=2 Dec 05 11:32:13 crc kubenswrapper[4728]: I1205 11:32:13.665067 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"d32b489f-b040-4f20-badc-ef587eeb0960","Type":"ContainerDied","Data":"387774b1662c767fedb047814be1a290942b34641a3dc916766205c410ad3532"} Dec 05 11:32:14 crc kubenswrapper[4728]: I1205 11:32:14.019969 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 11:32:14 crc kubenswrapper[4728]: I1205 11:32:14.077332 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sx9dp\" (UniqueName: \"kubernetes.io/projected/d32b489f-b040-4f20-badc-ef587eeb0960-kube-api-access-sx9dp\") pod \"d32b489f-b040-4f20-badc-ef587eeb0960\" (UID: \"d32b489f-b040-4f20-badc-ef587eeb0960\") " Dec 05 11:32:14 crc kubenswrapper[4728]: I1205 11:32:14.084024 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d32b489f-b040-4f20-badc-ef587eeb0960-kube-api-access-sx9dp" (OuterVolumeSpecName: "kube-api-access-sx9dp") pod "d32b489f-b040-4f20-badc-ef587eeb0960" (UID: "d32b489f-b040-4f20-badc-ef587eeb0960"). InnerVolumeSpecName "kube-api-access-sx9dp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:32:14 crc kubenswrapper[4728]: I1205 11:32:14.180212 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sx9dp\" (UniqueName: \"kubernetes.io/projected/d32b489f-b040-4f20-badc-ef587eeb0960-kube-api-access-sx9dp\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:14 crc kubenswrapper[4728]: I1205 11:32:14.680762 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"d32b489f-b040-4f20-badc-ef587eeb0960","Type":"ContainerDied","Data":"46c0e2b82741e525981d01205fe5f3ddde729eb1176514e2126cf9d20c72f4b2"} Dec 05 11:32:14 crc kubenswrapper[4728]: I1205 11:32:14.680836 4728 scope.go:117] "RemoveContainer" containerID="387774b1662c767fedb047814be1a290942b34641a3dc916766205c410ad3532" Dec 05 11:32:14 crc kubenswrapper[4728]: I1205 11:32:14.680962 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 11:32:14 crc kubenswrapper[4728]: I1205 11:32:14.728397 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 11:32:14 crc kubenswrapper[4728]: I1205 11:32:14.746606 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 11:32:14 crc kubenswrapper[4728]: I1205 11:32:14.766771 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 11:32:14 crc kubenswrapper[4728]: E1205 11:32:14.767234 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d32b489f-b040-4f20-badc-ef587eeb0960" containerName="kube-state-metrics" Dec 05 11:32:14 crc kubenswrapper[4728]: I1205 11:32:14.767253 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="d32b489f-b040-4f20-badc-ef587eeb0960" containerName="kube-state-metrics" Dec 05 11:32:14 crc kubenswrapper[4728]: I1205 11:32:14.767441 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="d32b489f-b040-4f20-badc-ef587eeb0960" containerName="kube-state-metrics" Dec 05 11:32:14 crc kubenswrapper[4728]: I1205 11:32:14.768043 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 11:32:14 crc kubenswrapper[4728]: I1205 11:32:14.770603 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Dec 05 11:32:14 crc kubenswrapper[4728]: I1205 11:32:14.775617 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Dec 05 11:32:14 crc kubenswrapper[4728]: I1205 11:32:14.778645 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 11:32:14 crc kubenswrapper[4728]: I1205 11:32:14.899579 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gsx87\" (UniqueName: \"kubernetes.io/projected/910efe4a-03b6-4aa7-aa87-d69b832a3db9-kube-api-access-gsx87\") pod \"kube-state-metrics-0\" (UID: \"910efe4a-03b6-4aa7-aa87-d69b832a3db9\") " pod="openstack/kube-state-metrics-0" Dec 05 11:32:14 crc kubenswrapper[4728]: I1205 11:32:14.899631 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/910efe4a-03b6-4aa7-aa87-d69b832a3db9-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"910efe4a-03b6-4aa7-aa87-d69b832a3db9\") " pod="openstack/kube-state-metrics-0" Dec 05 11:32:14 crc kubenswrapper[4728]: I1205 11:32:14.899722 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/910efe4a-03b6-4aa7-aa87-d69b832a3db9-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"910efe4a-03b6-4aa7-aa87-d69b832a3db9\") " pod="openstack/kube-state-metrics-0" Dec 05 11:32:14 crc kubenswrapper[4728]: I1205 11:32:14.899761 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/910efe4a-03b6-4aa7-aa87-d69b832a3db9-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"910efe4a-03b6-4aa7-aa87-d69b832a3db9\") " pod="openstack/kube-state-metrics-0" Dec 05 11:32:15 crc kubenswrapper[4728]: I1205 11:32:15.001113 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gsx87\" (UniqueName: \"kubernetes.io/projected/910efe4a-03b6-4aa7-aa87-d69b832a3db9-kube-api-access-gsx87\") pod \"kube-state-metrics-0\" (UID: \"910efe4a-03b6-4aa7-aa87-d69b832a3db9\") " pod="openstack/kube-state-metrics-0" Dec 05 11:32:15 crc kubenswrapper[4728]: I1205 11:32:15.001161 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/910efe4a-03b6-4aa7-aa87-d69b832a3db9-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"910efe4a-03b6-4aa7-aa87-d69b832a3db9\") " pod="openstack/kube-state-metrics-0" Dec 05 11:32:15 crc kubenswrapper[4728]: I1205 11:32:15.001235 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/910efe4a-03b6-4aa7-aa87-d69b832a3db9-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"910efe4a-03b6-4aa7-aa87-d69b832a3db9\") " pod="openstack/kube-state-metrics-0" Dec 05 11:32:15 crc kubenswrapper[4728]: I1205 11:32:15.001267 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" 
(UniqueName: \"kubernetes.io/secret/910efe4a-03b6-4aa7-aa87-d69b832a3db9-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"910efe4a-03b6-4aa7-aa87-d69b832a3db9\") " pod="openstack/kube-state-metrics-0" Dec 05 11:32:15 crc kubenswrapper[4728]: I1205 11:32:15.007246 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/910efe4a-03b6-4aa7-aa87-d69b832a3db9-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"910efe4a-03b6-4aa7-aa87-d69b832a3db9\") " pod="openstack/kube-state-metrics-0" Dec 05 11:32:15 crc kubenswrapper[4728]: I1205 11:32:15.010888 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/910efe4a-03b6-4aa7-aa87-d69b832a3db9-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"910efe4a-03b6-4aa7-aa87-d69b832a3db9\") " pod="openstack/kube-state-metrics-0" Dec 05 11:32:15 crc kubenswrapper[4728]: I1205 11:32:15.027038 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/910efe4a-03b6-4aa7-aa87-d69b832a3db9-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"910efe4a-03b6-4aa7-aa87-d69b832a3db9\") " pod="openstack/kube-state-metrics-0" Dec 05 11:32:15 crc kubenswrapper[4728]: I1205 11:32:15.029749 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:32:15 crc kubenswrapper[4728]: I1205 11:32:15.030117 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ca1fe40d-770a-49f8-b3cc-d1c43be92be9" containerName="ceilometer-central-agent" containerID="cri-o://7c72891662fc9050926147f82b35ac9b954630c3f111de8e672ada7d301559b6" gracePeriod=30 Dec 05 11:32:15 crc kubenswrapper[4728]: I1205 11:32:15.030385 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ca1fe40d-770a-49f8-b3cc-d1c43be92be9" containerName="proxy-httpd" containerID="cri-o://62b6484d4af7c71e54757b9a0a46e0a2b5397d82a636040ef2997e01a765ded5" gracePeriod=30 Dec 05 11:32:15 crc kubenswrapper[4728]: I1205 11:32:15.030572 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ca1fe40d-770a-49f8-b3cc-d1c43be92be9" containerName="sg-core" containerID="cri-o://df29a0aa67af47af483c828d03718f7e34781475b33abaf7e414afa3eed69446" gracePeriod=30 Dec 05 11:32:15 crc kubenswrapper[4728]: I1205 11:32:15.030630 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ca1fe40d-770a-49f8-b3cc-d1c43be92be9" containerName="ceilometer-notification-agent" containerID="cri-o://369b7d33887016d14af4e80b9e8f2950e223051e25212a2dc45bedae9c3ab71d" gracePeriod=30 Dec 05 11:32:15 crc kubenswrapper[4728]: I1205 11:32:15.032024 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gsx87\" (UniqueName: \"kubernetes.io/projected/910efe4a-03b6-4aa7-aa87-d69b832a3db9-kube-api-access-gsx87\") pod \"kube-state-metrics-0\" (UID: \"910efe4a-03b6-4aa7-aa87-d69b832a3db9\") " pod="openstack/kube-state-metrics-0" Dec 05 11:32:15 crc kubenswrapper[4728]: I1205 11:32:15.097193 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 11:32:16 crc kubenswrapper[4728]: I1205 11:32:15.593990 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 11:32:16 crc kubenswrapper[4728]: I1205 11:32:15.693464 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"910efe4a-03b6-4aa7-aa87-d69b832a3db9","Type":"ContainerStarted","Data":"0ff016985a1ffee9dec0ed5f6496fdcb349e9e94aada79df7b49b5b22610959a"} Dec 05 11:32:16 crc kubenswrapper[4728]: I1205 11:32:15.698389 4728 generic.go:334] "Generic (PLEG): container finished" podID="ca1fe40d-770a-49f8-b3cc-d1c43be92be9" containerID="62b6484d4af7c71e54757b9a0a46e0a2b5397d82a636040ef2997e01a765ded5" exitCode=0 Dec 05 11:32:16 crc kubenswrapper[4728]: I1205 11:32:15.698416 4728 generic.go:334] "Generic (PLEG): container finished" podID="ca1fe40d-770a-49f8-b3cc-d1c43be92be9" containerID="df29a0aa67af47af483c828d03718f7e34781475b33abaf7e414afa3eed69446" exitCode=2 Dec 05 11:32:16 crc kubenswrapper[4728]: I1205 11:32:15.698447 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ca1fe40d-770a-49f8-b3cc-d1c43be92be9","Type":"ContainerDied","Data":"62b6484d4af7c71e54757b9a0a46e0a2b5397d82a636040ef2997e01a765ded5"} Dec 05 11:32:16 crc kubenswrapper[4728]: I1205 11:32:15.698475 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ca1fe40d-770a-49f8-b3cc-d1c43be92be9","Type":"ContainerDied","Data":"df29a0aa67af47af483c828d03718f7e34781475b33abaf7e414afa3eed69446"} Dec 05 11:32:16 crc kubenswrapper[4728]: I1205 11:32:15.698487 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ca1fe40d-770a-49f8-b3cc-d1c43be92be9","Type":"ContainerDied","Data":"7c72891662fc9050926147f82b35ac9b954630c3f111de8e672ada7d301559b6"} Dec 05 11:32:16 crc kubenswrapper[4728]: I1205 11:32:15.698457 4728 generic.go:334] "Generic (PLEG): container finished" podID="ca1fe40d-770a-49f8-b3cc-d1c43be92be9" containerID="7c72891662fc9050926147f82b35ac9b954630c3f111de8e672ada7d301559b6" exitCode=0 Dec 05 11:32:16 crc kubenswrapper[4728]: I1205 11:32:15.968234 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 05 11:32:16 crc kubenswrapper[4728]: I1205 11:32:15.968641 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 05 11:32:16 crc kubenswrapper[4728]: I1205 11:32:16.004767 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Dec 05 11:32:16 crc kubenswrapper[4728]: I1205 11:32:16.074192 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 05 11:32:16 crc kubenswrapper[4728]: I1205 11:32:16.367746 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d32b489f-b040-4f20-badc-ef587eeb0960" path="/var/lib/kubelet/pods/d32b489f-b040-4f20-badc-ef587eeb0960/volumes" Dec 05 11:32:16 crc kubenswrapper[4728]: I1205 11:32:16.708945 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"910efe4a-03b6-4aa7-aa87-d69b832a3db9","Type":"ContainerStarted","Data":"9412e56ce89ea0434d314614ab1cf360682d0d788bf6e3fab95154313d130837"} Dec 05 11:32:16 crc kubenswrapper[4728]: I1205 11:32:16.710185 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/kube-state-metrics-0" Dec 05 11:32:16 crc kubenswrapper[4728]: I1205 11:32:16.735737 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.394517632 podStartE2EDuration="2.735717414s" podCreationTimestamp="2025-12-05 11:32:14 +0000 UTC" firstStartedPulling="2025-12-05 11:32:15.59731198 +0000 UTC m=+1469.739434663" lastFinishedPulling="2025-12-05 11:32:15.938511742 +0000 UTC m=+1470.080634445" observedRunningTime="2025-12-05 11:32:16.728299212 +0000 UTC m=+1470.870421935" watchObservedRunningTime="2025-12-05 11:32:16.735717414 +0000 UTC m=+1470.877840117" Dec 05 11:32:16 crc kubenswrapper[4728]: I1205 11:32:16.981394 4728 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="2bc84149-614a-4651-8686-eef191b3d230" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.211:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 11:32:16 crc kubenswrapper[4728]: I1205 11:32:16.981441 4728 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="2bc84149-614a-4651-8686-eef191b3d230" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.211:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 11:32:17 crc kubenswrapper[4728]: I1205 11:32:17.654041 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 11:32:17 crc kubenswrapper[4728]: I1205 11:32:17.723338 4728 generic.go:334] "Generic (PLEG): container finished" podID="ca1fe40d-770a-49f8-b3cc-d1c43be92be9" containerID="369b7d33887016d14af4e80b9e8f2950e223051e25212a2dc45bedae9c3ab71d" exitCode=0 Dec 05 11:32:17 crc kubenswrapper[4728]: I1205 11:32:17.723407 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 11:32:17 crc kubenswrapper[4728]: I1205 11:32:17.723437 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ca1fe40d-770a-49f8-b3cc-d1c43be92be9","Type":"ContainerDied","Data":"369b7d33887016d14af4e80b9e8f2950e223051e25212a2dc45bedae9c3ab71d"} Dec 05 11:32:17 crc kubenswrapper[4728]: I1205 11:32:17.723484 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ca1fe40d-770a-49f8-b3cc-d1c43be92be9","Type":"ContainerDied","Data":"bf5a7da2210a30cead1562e519c52710c9ee31b2972559f2888c70b0a5f17c66"} Dec 05 11:32:17 crc kubenswrapper[4728]: I1205 11:32:17.723506 4728 scope.go:117] "RemoveContainer" containerID="62b6484d4af7c71e54757b9a0a46e0a2b5397d82a636040ef2997e01a765ded5" Dec 05 11:32:17 crc kubenswrapper[4728]: I1205 11:32:17.746379 4728 scope.go:117] "RemoveContainer" containerID="df29a0aa67af47af483c828d03718f7e34781475b33abaf7e414afa3eed69446" Dec 05 11:32:17 crc kubenswrapper[4728]: I1205 11:32:17.773098 4728 scope.go:117] "RemoveContainer" containerID="369b7d33887016d14af4e80b9e8f2950e223051e25212a2dc45bedae9c3ab71d" Dec 05 11:32:17 crc kubenswrapper[4728]: I1205 11:32:17.777702 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ca1fe40d-770a-49f8-b3cc-d1c43be92be9-sg-core-conf-yaml\") pod \"ca1fe40d-770a-49f8-b3cc-d1c43be92be9\" (UID: \"ca1fe40d-770a-49f8-b3cc-d1c43be92be9\") " Dec 05 11:32:17 crc kubenswrapper[4728]: I1205 11:32:17.777776 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z2c4x\" (UniqueName: \"kubernetes.io/projected/ca1fe40d-770a-49f8-b3cc-d1c43be92be9-kube-api-access-z2c4x\") pod \"ca1fe40d-770a-49f8-b3cc-d1c43be92be9\" (UID: \"ca1fe40d-770a-49f8-b3cc-d1c43be92be9\") " Dec 05 11:32:17 crc kubenswrapper[4728]: I1205 11:32:17.777901 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ca1fe40d-770a-49f8-b3cc-d1c43be92be9-log-httpd\") pod \"ca1fe40d-770a-49f8-b3cc-d1c43be92be9\" (UID: \"ca1fe40d-770a-49f8-b3cc-d1c43be92be9\") " Dec 05 11:32:17 crc kubenswrapper[4728]: I1205 11:32:17.778004 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca1fe40d-770a-49f8-b3cc-d1c43be92be9-config-data\") pod \"ca1fe40d-770a-49f8-b3cc-d1c43be92be9\" (UID: \"ca1fe40d-770a-49f8-b3cc-d1c43be92be9\") " Dec 05 11:32:17 crc kubenswrapper[4728]: I1205 11:32:17.778096 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ca1fe40d-770a-49f8-b3cc-d1c43be92be9-scripts\") pod \"ca1fe40d-770a-49f8-b3cc-d1c43be92be9\" (UID: \"ca1fe40d-770a-49f8-b3cc-d1c43be92be9\") " Dec 05 11:32:17 crc kubenswrapper[4728]: I1205 11:32:17.778143 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ca1fe40d-770a-49f8-b3cc-d1c43be92be9-run-httpd\") pod \"ca1fe40d-770a-49f8-b3cc-d1c43be92be9\" (UID: \"ca1fe40d-770a-49f8-b3cc-d1c43be92be9\") " Dec 05 11:32:17 crc kubenswrapper[4728]: I1205 11:32:17.778210 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca1fe40d-770a-49f8-b3cc-d1c43be92be9-combined-ca-bundle\") pod 
\"ca1fe40d-770a-49f8-b3cc-d1c43be92be9\" (UID: \"ca1fe40d-770a-49f8-b3cc-d1c43be92be9\") " Dec 05 11:32:17 crc kubenswrapper[4728]: I1205 11:32:17.778910 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ca1fe40d-770a-49f8-b3cc-d1c43be92be9-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "ca1fe40d-770a-49f8-b3cc-d1c43be92be9" (UID: "ca1fe40d-770a-49f8-b3cc-d1c43be92be9"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:32:17 crc kubenswrapper[4728]: I1205 11:32:17.779078 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ca1fe40d-770a-49f8-b3cc-d1c43be92be9-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "ca1fe40d-770a-49f8-b3cc-d1c43be92be9" (UID: "ca1fe40d-770a-49f8-b3cc-d1c43be92be9"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:32:17 crc kubenswrapper[4728]: I1205 11:32:17.785361 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ca1fe40d-770a-49f8-b3cc-d1c43be92be9-kube-api-access-z2c4x" (OuterVolumeSpecName: "kube-api-access-z2c4x") pod "ca1fe40d-770a-49f8-b3cc-d1c43be92be9" (UID: "ca1fe40d-770a-49f8-b3cc-d1c43be92be9"). InnerVolumeSpecName "kube-api-access-z2c4x". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:32:17 crc kubenswrapper[4728]: I1205 11:32:17.786911 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca1fe40d-770a-49f8-b3cc-d1c43be92be9-scripts" (OuterVolumeSpecName: "scripts") pod "ca1fe40d-770a-49f8-b3cc-d1c43be92be9" (UID: "ca1fe40d-770a-49f8-b3cc-d1c43be92be9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:32:17 crc kubenswrapper[4728]: I1205 11:32:17.811105 4728 scope.go:117] "RemoveContainer" containerID="7c72891662fc9050926147f82b35ac9b954630c3f111de8e672ada7d301559b6" Dec 05 11:32:17 crc kubenswrapper[4728]: I1205 11:32:17.826078 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca1fe40d-770a-49f8-b3cc-d1c43be92be9-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "ca1fe40d-770a-49f8-b3cc-d1c43be92be9" (UID: "ca1fe40d-770a-49f8-b3cc-d1c43be92be9"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:32:17 crc kubenswrapper[4728]: I1205 11:32:17.865979 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca1fe40d-770a-49f8-b3cc-d1c43be92be9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ca1fe40d-770a-49f8-b3cc-d1c43be92be9" (UID: "ca1fe40d-770a-49f8-b3cc-d1c43be92be9"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:32:17 crc kubenswrapper[4728]: I1205 11:32:17.880899 4728 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ca1fe40d-770a-49f8-b3cc-d1c43be92be9-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:17 crc kubenswrapper[4728]: I1205 11:32:17.880924 4728 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca1fe40d-770a-49f8-b3cc-d1c43be92be9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:17 crc kubenswrapper[4728]: I1205 11:32:17.880935 4728 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ca1fe40d-770a-49f8-b3cc-d1c43be92be9-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:17 crc kubenswrapper[4728]: I1205 11:32:17.880945 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z2c4x\" (UniqueName: \"kubernetes.io/projected/ca1fe40d-770a-49f8-b3cc-d1c43be92be9-kube-api-access-z2c4x\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:17 crc kubenswrapper[4728]: I1205 11:32:17.880953 4728 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ca1fe40d-770a-49f8-b3cc-d1c43be92be9-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:17 crc kubenswrapper[4728]: I1205 11:32:17.880961 4728 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ca1fe40d-770a-49f8-b3cc-d1c43be92be9-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:17 crc kubenswrapper[4728]: I1205 11:32:17.883310 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca1fe40d-770a-49f8-b3cc-d1c43be92be9-config-data" (OuterVolumeSpecName: "config-data") pod "ca1fe40d-770a-49f8-b3cc-d1c43be92be9" (UID: "ca1fe40d-770a-49f8-b3cc-d1c43be92be9"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:32:17 crc kubenswrapper[4728]: I1205 11:32:17.905768 4728 scope.go:117] "RemoveContainer" containerID="62b6484d4af7c71e54757b9a0a46e0a2b5397d82a636040ef2997e01a765ded5" Dec 05 11:32:17 crc kubenswrapper[4728]: E1205 11:32:17.906356 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"62b6484d4af7c71e54757b9a0a46e0a2b5397d82a636040ef2997e01a765ded5\": container with ID starting with 62b6484d4af7c71e54757b9a0a46e0a2b5397d82a636040ef2997e01a765ded5 not found: ID does not exist" containerID="62b6484d4af7c71e54757b9a0a46e0a2b5397d82a636040ef2997e01a765ded5" Dec 05 11:32:17 crc kubenswrapper[4728]: I1205 11:32:17.906392 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62b6484d4af7c71e54757b9a0a46e0a2b5397d82a636040ef2997e01a765ded5"} err="failed to get container status \"62b6484d4af7c71e54757b9a0a46e0a2b5397d82a636040ef2997e01a765ded5\": rpc error: code = NotFound desc = could not find container \"62b6484d4af7c71e54757b9a0a46e0a2b5397d82a636040ef2997e01a765ded5\": container with ID starting with 62b6484d4af7c71e54757b9a0a46e0a2b5397d82a636040ef2997e01a765ded5 not found: ID does not exist" Dec 05 11:32:17 crc kubenswrapper[4728]: I1205 11:32:17.906419 4728 scope.go:117] "RemoveContainer" containerID="df29a0aa67af47af483c828d03718f7e34781475b33abaf7e414afa3eed69446" Dec 05 11:32:17 crc kubenswrapper[4728]: E1205 11:32:17.907106 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"df29a0aa67af47af483c828d03718f7e34781475b33abaf7e414afa3eed69446\": container with ID starting with df29a0aa67af47af483c828d03718f7e34781475b33abaf7e414afa3eed69446 not found: ID does not exist" containerID="df29a0aa67af47af483c828d03718f7e34781475b33abaf7e414afa3eed69446" Dec 05 11:32:17 crc kubenswrapper[4728]: I1205 11:32:17.907127 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df29a0aa67af47af483c828d03718f7e34781475b33abaf7e414afa3eed69446"} err="failed to get container status \"df29a0aa67af47af483c828d03718f7e34781475b33abaf7e414afa3eed69446\": rpc error: code = NotFound desc = could not find container \"df29a0aa67af47af483c828d03718f7e34781475b33abaf7e414afa3eed69446\": container with ID starting with df29a0aa67af47af483c828d03718f7e34781475b33abaf7e414afa3eed69446 not found: ID does not exist" Dec 05 11:32:17 crc kubenswrapper[4728]: I1205 11:32:17.907146 4728 scope.go:117] "RemoveContainer" containerID="369b7d33887016d14af4e80b9e8f2950e223051e25212a2dc45bedae9c3ab71d" Dec 05 11:32:17 crc kubenswrapper[4728]: E1205 11:32:17.907436 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"369b7d33887016d14af4e80b9e8f2950e223051e25212a2dc45bedae9c3ab71d\": container with ID starting with 369b7d33887016d14af4e80b9e8f2950e223051e25212a2dc45bedae9c3ab71d not found: ID does not exist" containerID="369b7d33887016d14af4e80b9e8f2950e223051e25212a2dc45bedae9c3ab71d" Dec 05 11:32:17 crc kubenswrapper[4728]: I1205 11:32:17.907461 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"369b7d33887016d14af4e80b9e8f2950e223051e25212a2dc45bedae9c3ab71d"} err="failed to get container status \"369b7d33887016d14af4e80b9e8f2950e223051e25212a2dc45bedae9c3ab71d\": rpc error: code = NotFound desc = could not 
find container \"369b7d33887016d14af4e80b9e8f2950e223051e25212a2dc45bedae9c3ab71d\": container with ID starting with 369b7d33887016d14af4e80b9e8f2950e223051e25212a2dc45bedae9c3ab71d not found: ID does not exist" Dec 05 11:32:17 crc kubenswrapper[4728]: I1205 11:32:17.907500 4728 scope.go:117] "RemoveContainer" containerID="7c72891662fc9050926147f82b35ac9b954630c3f111de8e672ada7d301559b6" Dec 05 11:32:17 crc kubenswrapper[4728]: E1205 11:32:17.907882 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7c72891662fc9050926147f82b35ac9b954630c3f111de8e672ada7d301559b6\": container with ID starting with 7c72891662fc9050926147f82b35ac9b954630c3f111de8e672ada7d301559b6 not found: ID does not exist" containerID="7c72891662fc9050926147f82b35ac9b954630c3f111de8e672ada7d301559b6" Dec 05 11:32:17 crc kubenswrapper[4728]: I1205 11:32:17.907950 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7c72891662fc9050926147f82b35ac9b954630c3f111de8e672ada7d301559b6"} err="failed to get container status \"7c72891662fc9050926147f82b35ac9b954630c3f111de8e672ada7d301559b6\": rpc error: code = NotFound desc = could not find container \"7c72891662fc9050926147f82b35ac9b954630c3f111de8e672ada7d301559b6\": container with ID starting with 7c72891662fc9050926147f82b35ac9b954630c3f111de8e672ada7d301559b6 not found: ID does not exist" Dec 05 11:32:17 crc kubenswrapper[4728]: I1205 11:32:17.983117 4728 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca1fe40d-770a-49f8-b3cc-d1c43be92be9-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:18 crc kubenswrapper[4728]: I1205 11:32:18.056490 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:32:18 crc kubenswrapper[4728]: I1205 11:32:18.066653 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:32:18 crc kubenswrapper[4728]: I1205 11:32:18.090243 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:32:18 crc kubenswrapper[4728]: E1205 11:32:18.090678 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca1fe40d-770a-49f8-b3cc-d1c43be92be9" containerName="proxy-httpd" Dec 05 11:32:18 crc kubenswrapper[4728]: I1205 11:32:18.090699 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca1fe40d-770a-49f8-b3cc-d1c43be92be9" containerName="proxy-httpd" Dec 05 11:32:18 crc kubenswrapper[4728]: E1205 11:32:18.090752 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca1fe40d-770a-49f8-b3cc-d1c43be92be9" containerName="ceilometer-central-agent" Dec 05 11:32:18 crc kubenswrapper[4728]: I1205 11:32:18.090777 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca1fe40d-770a-49f8-b3cc-d1c43be92be9" containerName="ceilometer-central-agent" Dec 05 11:32:18 crc kubenswrapper[4728]: E1205 11:32:18.090805 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca1fe40d-770a-49f8-b3cc-d1c43be92be9" containerName="sg-core" Dec 05 11:32:18 crc kubenswrapper[4728]: I1205 11:32:18.090813 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca1fe40d-770a-49f8-b3cc-d1c43be92be9" containerName="sg-core" Dec 05 11:32:18 crc kubenswrapper[4728]: E1205 11:32:18.090822 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca1fe40d-770a-49f8-b3cc-d1c43be92be9" containerName="ceilometer-notification-agent" 
Dec 05 11:32:18 crc kubenswrapper[4728]: I1205 11:32:18.090830 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca1fe40d-770a-49f8-b3cc-d1c43be92be9" containerName="ceilometer-notification-agent" Dec 05 11:32:18 crc kubenswrapper[4728]: I1205 11:32:18.091033 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="ca1fe40d-770a-49f8-b3cc-d1c43be92be9" containerName="ceilometer-central-agent" Dec 05 11:32:18 crc kubenswrapper[4728]: I1205 11:32:18.091053 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="ca1fe40d-770a-49f8-b3cc-d1c43be92be9" containerName="proxy-httpd" Dec 05 11:32:18 crc kubenswrapper[4728]: I1205 11:32:18.091066 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="ca1fe40d-770a-49f8-b3cc-d1c43be92be9" containerName="ceilometer-notification-agent" Dec 05 11:32:18 crc kubenswrapper[4728]: I1205 11:32:18.091086 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="ca1fe40d-770a-49f8-b3cc-d1c43be92be9" containerName="sg-core" Dec 05 11:32:18 crc kubenswrapper[4728]: I1205 11:32:18.094749 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 11:32:18 crc kubenswrapper[4728]: I1205 11:32:18.096953 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 11:32:18 crc kubenswrapper[4728]: I1205 11:32:18.097209 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 05 11:32:18 crc kubenswrapper[4728]: I1205 11:32:18.108251 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 11:32:18 crc kubenswrapper[4728]: I1205 11:32:18.109334 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:32:18 crc kubenswrapper[4728]: I1205 11:32:18.288835 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/31a0b2ed-0629-4586-8bd2-3e728c70fc9b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"31a0b2ed-0629-4586-8bd2-3e728c70fc9b\") " pod="openstack/ceilometer-0" Dec 05 11:32:18 crc kubenswrapper[4728]: I1205 11:32:18.288903 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/31a0b2ed-0629-4586-8bd2-3e728c70fc9b-scripts\") pod \"ceilometer-0\" (UID: \"31a0b2ed-0629-4586-8bd2-3e728c70fc9b\") " pod="openstack/ceilometer-0" Dec 05 11:32:18 crc kubenswrapper[4728]: I1205 11:32:18.288958 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rft6t\" (UniqueName: \"kubernetes.io/projected/31a0b2ed-0629-4586-8bd2-3e728c70fc9b-kube-api-access-rft6t\") pod \"ceilometer-0\" (UID: \"31a0b2ed-0629-4586-8bd2-3e728c70fc9b\") " pod="openstack/ceilometer-0" Dec 05 11:32:18 crc kubenswrapper[4728]: I1205 11:32:18.289773 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/31a0b2ed-0629-4586-8bd2-3e728c70fc9b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"31a0b2ed-0629-4586-8bd2-3e728c70fc9b\") " pod="openstack/ceilometer-0" Dec 05 11:32:18 crc kubenswrapper[4728]: I1205 11:32:18.290021 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/31a0b2ed-0629-4586-8bd2-3e728c70fc9b-config-data\") pod \"ceilometer-0\" (UID: \"31a0b2ed-0629-4586-8bd2-3e728c70fc9b\") " pod="openstack/ceilometer-0" Dec 05 11:32:18 crc kubenswrapper[4728]: I1205 11:32:18.290080 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/31a0b2ed-0629-4586-8bd2-3e728c70fc9b-log-httpd\") pod \"ceilometer-0\" (UID: \"31a0b2ed-0629-4586-8bd2-3e728c70fc9b\") " pod="openstack/ceilometer-0" Dec 05 11:32:18 crc kubenswrapper[4728]: I1205 11:32:18.290110 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/31a0b2ed-0629-4586-8bd2-3e728c70fc9b-run-httpd\") pod \"ceilometer-0\" (UID: \"31a0b2ed-0629-4586-8bd2-3e728c70fc9b\") " pod="openstack/ceilometer-0" Dec 05 11:32:18 crc kubenswrapper[4728]: I1205 11:32:18.290340 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31a0b2ed-0629-4586-8bd2-3e728c70fc9b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"31a0b2ed-0629-4586-8bd2-3e728c70fc9b\") " pod="openstack/ceilometer-0" Dec 05 11:32:18 crc kubenswrapper[4728]: I1205 11:32:18.363291 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ca1fe40d-770a-49f8-b3cc-d1c43be92be9" path="/var/lib/kubelet/pods/ca1fe40d-770a-49f8-b3cc-d1c43be92be9/volumes" Dec 05 11:32:18 crc kubenswrapper[4728]: I1205 11:32:18.391482 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rft6t\" (UniqueName: \"kubernetes.io/projected/31a0b2ed-0629-4586-8bd2-3e728c70fc9b-kube-api-access-rft6t\") pod \"ceilometer-0\" (UID: \"31a0b2ed-0629-4586-8bd2-3e728c70fc9b\") " pod="openstack/ceilometer-0" Dec 05 11:32:18 crc kubenswrapper[4728]: I1205 11:32:18.391526 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/31a0b2ed-0629-4586-8bd2-3e728c70fc9b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"31a0b2ed-0629-4586-8bd2-3e728c70fc9b\") " pod="openstack/ceilometer-0" Dec 05 11:32:18 crc kubenswrapper[4728]: I1205 11:32:18.391588 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31a0b2ed-0629-4586-8bd2-3e728c70fc9b-config-data\") pod \"ceilometer-0\" (UID: \"31a0b2ed-0629-4586-8bd2-3e728c70fc9b\") " pod="openstack/ceilometer-0" Dec 05 11:32:18 crc kubenswrapper[4728]: I1205 11:32:18.391612 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/31a0b2ed-0629-4586-8bd2-3e728c70fc9b-log-httpd\") pod \"ceilometer-0\" (UID: \"31a0b2ed-0629-4586-8bd2-3e728c70fc9b\") " pod="openstack/ceilometer-0" Dec 05 11:32:18 crc kubenswrapper[4728]: I1205 11:32:18.391627 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/31a0b2ed-0629-4586-8bd2-3e728c70fc9b-run-httpd\") pod \"ceilometer-0\" (UID: \"31a0b2ed-0629-4586-8bd2-3e728c70fc9b\") " pod="openstack/ceilometer-0" Dec 05 11:32:18 crc kubenswrapper[4728]: I1205 11:32:18.391684 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/31a0b2ed-0629-4586-8bd2-3e728c70fc9b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"31a0b2ed-0629-4586-8bd2-3e728c70fc9b\") " pod="openstack/ceilometer-0" Dec 05 11:32:18 crc kubenswrapper[4728]: I1205 11:32:18.391712 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/31a0b2ed-0629-4586-8bd2-3e728c70fc9b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"31a0b2ed-0629-4586-8bd2-3e728c70fc9b\") " pod="openstack/ceilometer-0" Dec 05 11:32:18 crc kubenswrapper[4728]: I1205 11:32:18.391745 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/31a0b2ed-0629-4586-8bd2-3e728c70fc9b-scripts\") pod \"ceilometer-0\" (UID: \"31a0b2ed-0629-4586-8bd2-3e728c70fc9b\") " pod="openstack/ceilometer-0" Dec 05 11:32:18 crc kubenswrapper[4728]: I1205 11:32:18.392227 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/31a0b2ed-0629-4586-8bd2-3e728c70fc9b-log-httpd\") pod \"ceilometer-0\" (UID: \"31a0b2ed-0629-4586-8bd2-3e728c70fc9b\") " pod="openstack/ceilometer-0" Dec 05 11:32:18 crc kubenswrapper[4728]: I1205 11:32:18.392301 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/31a0b2ed-0629-4586-8bd2-3e728c70fc9b-run-httpd\") pod \"ceilometer-0\" (UID: \"31a0b2ed-0629-4586-8bd2-3e728c70fc9b\") " pod="openstack/ceilometer-0" Dec 05 11:32:18 crc kubenswrapper[4728]: I1205 11:32:18.396070 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/31a0b2ed-0629-4586-8bd2-3e728c70fc9b-scripts\") pod \"ceilometer-0\" (UID: \"31a0b2ed-0629-4586-8bd2-3e728c70fc9b\") " pod="openstack/ceilometer-0" Dec 05 11:32:18 crc kubenswrapper[4728]: I1205 11:32:18.396331 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31a0b2ed-0629-4586-8bd2-3e728c70fc9b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"31a0b2ed-0629-4586-8bd2-3e728c70fc9b\") " pod="openstack/ceilometer-0" Dec 05 11:32:18 crc kubenswrapper[4728]: I1205 11:32:18.397886 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/31a0b2ed-0629-4586-8bd2-3e728c70fc9b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"31a0b2ed-0629-4586-8bd2-3e728c70fc9b\") " pod="openstack/ceilometer-0" Dec 05 11:32:18 crc kubenswrapper[4728]: I1205 11:32:18.401783 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31a0b2ed-0629-4586-8bd2-3e728c70fc9b-config-data\") pod \"ceilometer-0\" (UID: \"31a0b2ed-0629-4586-8bd2-3e728c70fc9b\") " pod="openstack/ceilometer-0" Dec 05 11:32:18 crc kubenswrapper[4728]: I1205 11:32:18.404273 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/31a0b2ed-0629-4586-8bd2-3e728c70fc9b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"31a0b2ed-0629-4586-8bd2-3e728c70fc9b\") " pod="openstack/ceilometer-0" Dec 05 11:32:18 crc kubenswrapper[4728]: I1205 11:32:18.420367 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rft6t\" (UniqueName: 
\"kubernetes.io/projected/31a0b2ed-0629-4586-8bd2-3e728c70fc9b-kube-api-access-rft6t\") pod \"ceilometer-0\" (UID: \"31a0b2ed-0629-4586-8bd2-3e728c70fc9b\") " pod="openstack/ceilometer-0" Dec 05 11:32:18 crc kubenswrapper[4728]: I1205 11:32:18.712071 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 11:32:19 crc kubenswrapper[4728]: I1205 11:32:19.192276 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:32:19 crc kubenswrapper[4728]: I1205 11:32:19.757778 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"31a0b2ed-0629-4586-8bd2-3e728c70fc9b","Type":"ContainerStarted","Data":"3c979536d8528584cf84c6cce7ae9068de7638b807d127e33ffdb7f894a506e5"} Dec 05 11:32:20 crc kubenswrapper[4728]: I1205 11:32:20.767323 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"31a0b2ed-0629-4586-8bd2-3e728c70fc9b","Type":"ContainerStarted","Data":"6f3a5ec15671ac933c453e231526a3a388eeb3eab88b90bd48717bf87e5ecef0"} Dec 05 11:32:21 crc kubenswrapper[4728]: I1205 11:32:21.054430 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 11:32:21 crc kubenswrapper[4728]: I1205 11:32:21.055258 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 11:32:21 crc kubenswrapper[4728]: I1205 11:32:21.072570 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 05 11:32:21 crc kubenswrapper[4728]: I1205 11:32:21.103438 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 05 11:32:21 crc kubenswrapper[4728]: I1205 11:32:21.784307 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"31a0b2ed-0629-4586-8bd2-3e728c70fc9b","Type":"ContainerStarted","Data":"d9dc452492459336ebadafcd992302fdae5455d3b62f3589b393aed9623e22d9"} Dec 05 11:32:21 crc kubenswrapper[4728]: I1205 11:32:21.784751 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"31a0b2ed-0629-4586-8bd2-3e728c70fc9b","Type":"ContainerStarted","Data":"54a84f68c95fd31e034471220d266be24c799795b5039ee92d4bff852d1749d4"} Dec 05 11:32:21 crc kubenswrapper[4728]: I1205 11:32:21.828046 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 05 11:32:22 crc kubenswrapper[4728]: I1205 11:32:22.136961 4728 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="dc7732bf-7470-4243-9533-74f7432791bd" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.212:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 11:32:22 crc kubenswrapper[4728]: I1205 11:32:22.136977 4728 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="dc7732bf-7470-4243-9533-74f7432791bd" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.212:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 11:32:23 crc kubenswrapper[4728]: I1205 11:32:23.806383 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"31a0b2ed-0629-4586-8bd2-3e728c70fc9b","Type":"ContainerStarted","Data":"f13dd0f545a7d70e7bba474314e4d59d4674f9cf5d06ef91561f7666b9d98798"} Dec 05 11:32:23 crc kubenswrapper[4728]: I1205 11:32:23.806947 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 11:32:23 crc kubenswrapper[4728]: I1205 11:32:23.842880 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.05656863 podStartE2EDuration="5.842860713s" podCreationTimestamp="2025-12-05 11:32:18 +0000 UTC" firstStartedPulling="2025-12-05 11:32:19.201358343 +0000 UTC m=+1473.343481036" lastFinishedPulling="2025-12-05 11:32:22.987650426 +0000 UTC m=+1477.129773119" observedRunningTime="2025-12-05 11:32:23.836366175 +0000 UTC m=+1477.978488888" watchObservedRunningTime="2025-12-05 11:32:23.842860713 +0000 UTC m=+1477.984983426" Dec 05 11:32:25 crc kubenswrapper[4728]: I1205 11:32:25.111924 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 05 11:32:25 crc kubenswrapper[4728]: I1205 11:32:25.977778 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 05 11:32:25 crc kubenswrapper[4728]: I1205 11:32:25.979939 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 05 11:32:25 crc kubenswrapper[4728]: I1205 11:32:25.986905 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 05 11:32:26 crc kubenswrapper[4728]: I1205 11:32:26.844767 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 05 11:32:28 crc kubenswrapper[4728]: I1205 11:32:28.848438 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 11:32:28 crc kubenswrapper[4728]: I1205 11:32:28.875086 4728 generic.go:334] "Generic (PLEG): container finished" podID="bac9d7a3-475b-4107-9a25-45fe4f9756cb" containerID="b9db0a739e5d89088ea06e611fc763aab9cf6c16c9b63b0fa0c44dc4dba3ba7b" exitCode=137 Dec 05 11:32:28 crc kubenswrapper[4728]: I1205 11:32:28.875198 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"bac9d7a3-475b-4107-9a25-45fe4f9756cb","Type":"ContainerDied","Data":"b9db0a739e5d89088ea06e611fc763aab9cf6c16c9b63b0fa0c44dc4dba3ba7b"} Dec 05 11:32:28 crc kubenswrapper[4728]: I1205 11:32:28.875266 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"bac9d7a3-475b-4107-9a25-45fe4f9756cb","Type":"ContainerDied","Data":"ef2a22da900a3f988a625ed0ef61aa8d3321edc76336093b83c0c6a3dad8282c"} Dec 05 11:32:28 crc kubenswrapper[4728]: I1205 11:32:28.875305 4728 scope.go:117] "RemoveContainer" containerID="b9db0a739e5d89088ea06e611fc763aab9cf6c16c9b63b0fa0c44dc4dba3ba7b" Dec 05 11:32:28 crc kubenswrapper[4728]: I1205 11:32:28.876996 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 11:32:28 crc kubenswrapper[4728]: I1205 11:32:28.918775 4728 scope.go:117] "RemoveContainer" containerID="b9db0a739e5d89088ea06e611fc763aab9cf6c16c9b63b0fa0c44dc4dba3ba7b" Dec 05 11:32:28 crc kubenswrapper[4728]: E1205 11:32:28.919537 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b9db0a739e5d89088ea06e611fc763aab9cf6c16c9b63b0fa0c44dc4dba3ba7b\": container with ID starting with b9db0a739e5d89088ea06e611fc763aab9cf6c16c9b63b0fa0c44dc4dba3ba7b not found: ID does not exist" containerID="b9db0a739e5d89088ea06e611fc763aab9cf6c16c9b63b0fa0c44dc4dba3ba7b" Dec 05 11:32:28 crc kubenswrapper[4728]: I1205 11:32:28.919592 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b9db0a739e5d89088ea06e611fc763aab9cf6c16c9b63b0fa0c44dc4dba3ba7b"} err="failed to get container status \"b9db0a739e5d89088ea06e611fc763aab9cf6c16c9b63b0fa0c44dc4dba3ba7b\": rpc error: code = NotFound desc = could not find container \"b9db0a739e5d89088ea06e611fc763aab9cf6c16c9b63b0fa0c44dc4dba3ba7b\": container with ID starting with b9db0a739e5d89088ea06e611fc763aab9cf6c16c9b63b0fa0c44dc4dba3ba7b not found: ID does not exist" Dec 05 11:32:29 crc kubenswrapper[4728]: I1205 11:32:29.014781 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bac9d7a3-475b-4107-9a25-45fe4f9756cb-combined-ca-bundle\") pod \"bac9d7a3-475b-4107-9a25-45fe4f9756cb\" (UID: \"bac9d7a3-475b-4107-9a25-45fe4f9756cb\") " Dec 05 11:32:29 crc kubenswrapper[4728]: I1205 11:32:29.015127 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9rvcq\" (UniqueName: \"kubernetes.io/projected/bac9d7a3-475b-4107-9a25-45fe4f9756cb-kube-api-access-9rvcq\") pod \"bac9d7a3-475b-4107-9a25-45fe4f9756cb\" (UID: \"bac9d7a3-475b-4107-9a25-45fe4f9756cb\") " Dec 05 11:32:29 crc kubenswrapper[4728]: I1205 11:32:29.015152 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bac9d7a3-475b-4107-9a25-45fe4f9756cb-config-data\") pod \"bac9d7a3-475b-4107-9a25-45fe4f9756cb\" (UID: \"bac9d7a3-475b-4107-9a25-45fe4f9756cb\") " Dec 05 11:32:29 crc kubenswrapper[4728]: I1205 11:32:29.029159 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bac9d7a3-475b-4107-9a25-45fe4f9756cb-kube-api-access-9rvcq" (OuterVolumeSpecName: "kube-api-access-9rvcq") pod "bac9d7a3-475b-4107-9a25-45fe4f9756cb" (UID: "bac9d7a3-475b-4107-9a25-45fe4f9756cb"). InnerVolumeSpecName "kube-api-access-9rvcq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:32:29 crc kubenswrapper[4728]: I1205 11:32:29.042098 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bac9d7a3-475b-4107-9a25-45fe4f9756cb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bac9d7a3-475b-4107-9a25-45fe4f9756cb" (UID: "bac9d7a3-475b-4107-9a25-45fe4f9756cb"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:32:29 crc kubenswrapper[4728]: I1205 11:32:29.063135 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bac9d7a3-475b-4107-9a25-45fe4f9756cb-config-data" (OuterVolumeSpecName: "config-data") pod "bac9d7a3-475b-4107-9a25-45fe4f9756cb" (UID: "bac9d7a3-475b-4107-9a25-45fe4f9756cb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:32:29 crc kubenswrapper[4728]: I1205 11:32:29.117167 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9rvcq\" (UniqueName: \"kubernetes.io/projected/bac9d7a3-475b-4107-9a25-45fe4f9756cb-kube-api-access-9rvcq\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:29 crc kubenswrapper[4728]: I1205 11:32:29.117197 4728 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bac9d7a3-475b-4107-9a25-45fe4f9756cb-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:29 crc kubenswrapper[4728]: I1205 11:32:29.117206 4728 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bac9d7a3-475b-4107-9a25-45fe4f9756cb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:29 crc kubenswrapper[4728]: I1205 11:32:29.218627 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 11:32:29 crc kubenswrapper[4728]: I1205 11:32:29.227918 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 11:32:29 crc kubenswrapper[4728]: I1205 11:32:29.244512 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 11:32:29 crc kubenswrapper[4728]: E1205 11:32:29.245021 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bac9d7a3-475b-4107-9a25-45fe4f9756cb" containerName="nova-cell1-novncproxy-novncproxy" Dec 05 11:32:29 crc kubenswrapper[4728]: I1205 11:32:29.245048 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="bac9d7a3-475b-4107-9a25-45fe4f9756cb" containerName="nova-cell1-novncproxy-novncproxy" Dec 05 11:32:29 crc kubenswrapper[4728]: I1205 11:32:29.245255 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="bac9d7a3-475b-4107-9a25-45fe4f9756cb" containerName="nova-cell1-novncproxy-novncproxy" Dec 05 11:32:29 crc kubenswrapper[4728]: I1205 11:32:29.246063 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 11:32:29 crc kubenswrapper[4728]: I1205 11:32:29.250306 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 05 11:32:29 crc kubenswrapper[4728]: I1205 11:32:29.250597 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Dec 05 11:32:29 crc kubenswrapper[4728]: I1205 11:32:29.250909 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Dec 05 11:32:29 crc kubenswrapper[4728]: I1205 11:32:29.263000 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 11:32:29 crc kubenswrapper[4728]: I1205 11:32:29.422785 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/3c17270c-7319-4bc9-af0b-f008615371f9-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"3c17270c-7319-4bc9-af0b-f008615371f9\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 11:32:29 crc kubenswrapper[4728]: I1205 11:32:29.422872 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c17270c-7319-4bc9-af0b-f008615371f9-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"3c17270c-7319-4bc9-af0b-f008615371f9\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 11:32:29 crc kubenswrapper[4728]: I1205 11:32:29.422893 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c17270c-7319-4bc9-af0b-f008615371f9-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"3c17270c-7319-4bc9-af0b-f008615371f9\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 11:32:29 crc kubenswrapper[4728]: I1205 11:32:29.423080 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/3c17270c-7319-4bc9-af0b-f008615371f9-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"3c17270c-7319-4bc9-af0b-f008615371f9\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 11:32:29 crc kubenswrapper[4728]: I1205 11:32:29.423367 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-42hjx\" (UniqueName: \"kubernetes.io/projected/3c17270c-7319-4bc9-af0b-f008615371f9-kube-api-access-42hjx\") pod \"nova-cell1-novncproxy-0\" (UID: \"3c17270c-7319-4bc9-af0b-f008615371f9\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 11:32:29 crc kubenswrapper[4728]: I1205 11:32:29.524816 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/3c17270c-7319-4bc9-af0b-f008615371f9-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"3c17270c-7319-4bc9-af0b-f008615371f9\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 11:32:29 crc kubenswrapper[4728]: I1205 11:32:29.525178 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-42hjx\" (UniqueName: \"kubernetes.io/projected/3c17270c-7319-4bc9-af0b-f008615371f9-kube-api-access-42hjx\") pod \"nova-cell1-novncproxy-0\" (UID: \"3c17270c-7319-4bc9-af0b-f008615371f9\") " 
pod="openstack/nova-cell1-novncproxy-0" Dec 05 11:32:29 crc kubenswrapper[4728]: I1205 11:32:29.525284 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/3c17270c-7319-4bc9-af0b-f008615371f9-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"3c17270c-7319-4bc9-af0b-f008615371f9\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 11:32:29 crc kubenswrapper[4728]: I1205 11:32:29.525320 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c17270c-7319-4bc9-af0b-f008615371f9-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"3c17270c-7319-4bc9-af0b-f008615371f9\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 11:32:29 crc kubenswrapper[4728]: I1205 11:32:29.525343 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c17270c-7319-4bc9-af0b-f008615371f9-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"3c17270c-7319-4bc9-af0b-f008615371f9\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 11:32:29 crc kubenswrapper[4728]: I1205 11:32:29.529244 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c17270c-7319-4bc9-af0b-f008615371f9-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"3c17270c-7319-4bc9-af0b-f008615371f9\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 11:32:29 crc kubenswrapper[4728]: I1205 11:32:29.529559 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/3c17270c-7319-4bc9-af0b-f008615371f9-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"3c17270c-7319-4bc9-af0b-f008615371f9\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 11:32:29 crc kubenswrapper[4728]: I1205 11:32:29.530277 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/3c17270c-7319-4bc9-af0b-f008615371f9-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"3c17270c-7319-4bc9-af0b-f008615371f9\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 11:32:29 crc kubenswrapper[4728]: I1205 11:32:29.531321 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c17270c-7319-4bc9-af0b-f008615371f9-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"3c17270c-7319-4bc9-af0b-f008615371f9\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 11:32:29 crc kubenswrapper[4728]: I1205 11:32:29.544503 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-42hjx\" (UniqueName: \"kubernetes.io/projected/3c17270c-7319-4bc9-af0b-f008615371f9-kube-api-access-42hjx\") pod \"nova-cell1-novncproxy-0\" (UID: \"3c17270c-7319-4bc9-af0b-f008615371f9\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 11:32:29 crc kubenswrapper[4728]: I1205 11:32:29.612543 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 11:32:30 crc kubenswrapper[4728]: I1205 11:32:30.030587 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 11:32:30 crc kubenswrapper[4728]: W1205 11:32:30.032492 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3c17270c_7319_4bc9_af0b_f008615371f9.slice/crio-49f03d969a9f55977bfbae10ef7a310e6dc281122441927a5b1b28d5b1f04a0c WatchSource:0}: Error finding container 49f03d969a9f55977bfbae10ef7a310e6dc281122441927a5b1b28d5b1f04a0c: Status 404 returned error can't find the container with id 49f03d969a9f55977bfbae10ef7a310e6dc281122441927a5b1b28d5b1f04a0c Dec 05 11:32:30 crc kubenswrapper[4728]: I1205 11:32:30.376482 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bac9d7a3-475b-4107-9a25-45fe4f9756cb" path="/var/lib/kubelet/pods/bac9d7a3-475b-4107-9a25-45fe4f9756cb/volumes" Dec 05 11:32:30 crc kubenswrapper[4728]: I1205 11:32:30.894654 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"3c17270c-7319-4bc9-af0b-f008615371f9","Type":"ContainerStarted","Data":"d7132e3326c4fb78c901f2e05f2ac5cbb944b7c0caeb4a3a347f9ee92a4e698e"} Dec 05 11:32:30 crc kubenswrapper[4728]: I1205 11:32:30.894694 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"3c17270c-7319-4bc9-af0b-f008615371f9","Type":"ContainerStarted","Data":"49f03d969a9f55977bfbae10ef7a310e6dc281122441927a5b1b28d5b1f04a0c"} Dec 05 11:32:30 crc kubenswrapper[4728]: I1205 11:32:30.918917 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=1.9188975799999999 podStartE2EDuration="1.91889758s" podCreationTimestamp="2025-12-05 11:32:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:32:30.909842313 +0000 UTC m=+1485.051965036" watchObservedRunningTime="2025-12-05 11:32:30.91889758 +0000 UTC m=+1485.061020273" Dec 05 11:32:31 crc kubenswrapper[4728]: I1205 11:32:31.058493 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 05 11:32:31 crc kubenswrapper[4728]: I1205 11:32:31.060042 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 05 11:32:31 crc kubenswrapper[4728]: I1205 11:32:31.062003 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 05 11:32:31 crc kubenswrapper[4728]: I1205 11:32:31.065705 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 05 11:32:31 crc kubenswrapper[4728]: I1205 11:32:31.908220 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 05 11:32:31 crc kubenswrapper[4728]: I1205 11:32:31.911943 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 05 11:32:32 crc kubenswrapper[4728]: I1205 11:32:32.182295 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5b4c997d87-lb4tw"] Dec 05 11:32:32 crc kubenswrapper[4728]: I1205 11:32:32.188218 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5b4c997d87-lb4tw" Dec 05 11:32:32 crc kubenswrapper[4728]: I1205 11:32:32.204456 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b4c997d87-lb4tw"] Dec 05 11:32:32 crc kubenswrapper[4728]: I1205 11:32:32.307543 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3e8c1db4-4dcb-4500-a280-5ce6e96f855d-dns-swift-storage-0\") pod \"dnsmasq-dns-5b4c997d87-lb4tw\" (UID: \"3e8c1db4-4dcb-4500-a280-5ce6e96f855d\") " pod="openstack/dnsmasq-dns-5b4c997d87-lb4tw" Dec 05 11:32:32 crc kubenswrapper[4728]: I1205 11:32:32.307604 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3e8c1db4-4dcb-4500-a280-5ce6e96f855d-ovsdbserver-sb\") pod \"dnsmasq-dns-5b4c997d87-lb4tw\" (UID: \"3e8c1db4-4dcb-4500-a280-5ce6e96f855d\") " pod="openstack/dnsmasq-dns-5b4c997d87-lb4tw" Dec 05 11:32:32 crc kubenswrapper[4728]: I1205 11:32:32.307717 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3e8c1db4-4dcb-4500-a280-5ce6e96f855d-dns-svc\") pod \"dnsmasq-dns-5b4c997d87-lb4tw\" (UID: \"3e8c1db4-4dcb-4500-a280-5ce6e96f855d\") " pod="openstack/dnsmasq-dns-5b4c997d87-lb4tw" Dec 05 11:32:32 crc kubenswrapper[4728]: I1205 11:32:32.307758 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3e8c1db4-4dcb-4500-a280-5ce6e96f855d-ovsdbserver-nb\") pod \"dnsmasq-dns-5b4c997d87-lb4tw\" (UID: \"3e8c1db4-4dcb-4500-a280-5ce6e96f855d\") " pod="openstack/dnsmasq-dns-5b4c997d87-lb4tw" Dec 05 11:32:32 crc kubenswrapper[4728]: I1205 11:32:32.307832 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cmgvd\" (UniqueName: \"kubernetes.io/projected/3e8c1db4-4dcb-4500-a280-5ce6e96f855d-kube-api-access-cmgvd\") pod \"dnsmasq-dns-5b4c997d87-lb4tw\" (UID: \"3e8c1db4-4dcb-4500-a280-5ce6e96f855d\") " pod="openstack/dnsmasq-dns-5b4c997d87-lb4tw" Dec 05 11:32:32 crc kubenswrapper[4728]: I1205 11:32:32.308049 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e8c1db4-4dcb-4500-a280-5ce6e96f855d-config\") pod \"dnsmasq-dns-5b4c997d87-lb4tw\" (UID: \"3e8c1db4-4dcb-4500-a280-5ce6e96f855d\") " pod="openstack/dnsmasq-dns-5b4c997d87-lb4tw" Dec 05 11:32:32 crc kubenswrapper[4728]: I1205 11:32:32.411110 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3e8c1db4-4dcb-4500-a280-5ce6e96f855d-dns-swift-storage-0\") pod \"dnsmasq-dns-5b4c997d87-lb4tw\" (UID: \"3e8c1db4-4dcb-4500-a280-5ce6e96f855d\") " pod="openstack/dnsmasq-dns-5b4c997d87-lb4tw" Dec 05 11:32:32 crc kubenswrapper[4728]: I1205 11:32:32.411400 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3e8c1db4-4dcb-4500-a280-5ce6e96f855d-ovsdbserver-sb\") pod \"dnsmasq-dns-5b4c997d87-lb4tw\" (UID: \"3e8c1db4-4dcb-4500-a280-5ce6e96f855d\") " pod="openstack/dnsmasq-dns-5b4c997d87-lb4tw" Dec 05 11:32:32 crc kubenswrapper[4728]: I1205 11:32:32.411215 4728 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3e8c1db4-4dcb-4500-a280-5ce6e96f855d-dns-swift-storage-0\") pod \"dnsmasq-dns-5b4c997d87-lb4tw\" (UID: \"3e8c1db4-4dcb-4500-a280-5ce6e96f855d\") " pod="openstack/dnsmasq-dns-5b4c997d87-lb4tw" Dec 05 11:32:32 crc kubenswrapper[4728]: I1205 11:32:32.411549 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3e8c1db4-4dcb-4500-a280-5ce6e96f855d-dns-svc\") pod \"dnsmasq-dns-5b4c997d87-lb4tw\" (UID: \"3e8c1db4-4dcb-4500-a280-5ce6e96f855d\") " pod="openstack/dnsmasq-dns-5b4c997d87-lb4tw" Dec 05 11:32:32 crc kubenswrapper[4728]: I1205 11:32:32.412049 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3e8c1db4-4dcb-4500-a280-5ce6e96f855d-ovsdbserver-sb\") pod \"dnsmasq-dns-5b4c997d87-lb4tw\" (UID: \"3e8c1db4-4dcb-4500-a280-5ce6e96f855d\") " pod="openstack/dnsmasq-dns-5b4c997d87-lb4tw" Dec 05 11:32:32 crc kubenswrapper[4728]: I1205 11:32:32.412340 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3e8c1db4-4dcb-4500-a280-5ce6e96f855d-dns-svc\") pod \"dnsmasq-dns-5b4c997d87-lb4tw\" (UID: \"3e8c1db4-4dcb-4500-a280-5ce6e96f855d\") " pod="openstack/dnsmasq-dns-5b4c997d87-lb4tw" Dec 05 11:32:32 crc kubenswrapper[4728]: I1205 11:32:32.411584 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3e8c1db4-4dcb-4500-a280-5ce6e96f855d-ovsdbserver-nb\") pod \"dnsmasq-dns-5b4c997d87-lb4tw\" (UID: \"3e8c1db4-4dcb-4500-a280-5ce6e96f855d\") " pod="openstack/dnsmasq-dns-5b4c997d87-lb4tw" Dec 05 11:32:32 crc kubenswrapper[4728]: I1205 11:32:32.412447 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cmgvd\" (UniqueName: \"kubernetes.io/projected/3e8c1db4-4dcb-4500-a280-5ce6e96f855d-kube-api-access-cmgvd\") pod \"dnsmasq-dns-5b4c997d87-lb4tw\" (UID: \"3e8c1db4-4dcb-4500-a280-5ce6e96f855d\") " pod="openstack/dnsmasq-dns-5b4c997d87-lb4tw" Dec 05 11:32:32 crc kubenswrapper[4728]: I1205 11:32:32.412861 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3e8c1db4-4dcb-4500-a280-5ce6e96f855d-ovsdbserver-nb\") pod \"dnsmasq-dns-5b4c997d87-lb4tw\" (UID: \"3e8c1db4-4dcb-4500-a280-5ce6e96f855d\") " pod="openstack/dnsmasq-dns-5b4c997d87-lb4tw" Dec 05 11:32:32 crc kubenswrapper[4728]: I1205 11:32:32.413097 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e8c1db4-4dcb-4500-a280-5ce6e96f855d-config\") pod \"dnsmasq-dns-5b4c997d87-lb4tw\" (UID: \"3e8c1db4-4dcb-4500-a280-5ce6e96f855d\") " pod="openstack/dnsmasq-dns-5b4c997d87-lb4tw" Dec 05 11:32:32 crc kubenswrapper[4728]: I1205 11:32:32.413778 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e8c1db4-4dcb-4500-a280-5ce6e96f855d-config\") pod \"dnsmasq-dns-5b4c997d87-lb4tw\" (UID: \"3e8c1db4-4dcb-4500-a280-5ce6e96f855d\") " pod="openstack/dnsmasq-dns-5b4c997d87-lb4tw" Dec 05 11:32:32 crc kubenswrapper[4728]: I1205 11:32:32.433770 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cmgvd\" (UniqueName: 
\"kubernetes.io/projected/3e8c1db4-4dcb-4500-a280-5ce6e96f855d-kube-api-access-cmgvd\") pod \"dnsmasq-dns-5b4c997d87-lb4tw\" (UID: \"3e8c1db4-4dcb-4500-a280-5ce6e96f855d\") " pod="openstack/dnsmasq-dns-5b4c997d87-lb4tw" Dec 05 11:32:32 crc kubenswrapper[4728]: I1205 11:32:32.519952 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b4c997d87-lb4tw" Dec 05 11:32:32 crc kubenswrapper[4728]: I1205 11:32:32.998409 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b4c997d87-lb4tw"] Dec 05 11:32:33 crc kubenswrapper[4728]: W1205 11:32:33.003953 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3e8c1db4_4dcb_4500_a280_5ce6e96f855d.slice/crio-7cd464d620e0abb780fae3df1d00d2d13f50964443b21528d641b54e27b97220 WatchSource:0}: Error finding container 7cd464d620e0abb780fae3df1d00d2d13f50964443b21528d641b54e27b97220: Status 404 returned error can't find the container with id 7cd464d620e0abb780fae3df1d00d2d13f50964443b21528d641b54e27b97220 Dec 05 11:32:33 crc kubenswrapper[4728]: I1205 11:32:33.900934 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:32:33 crc kubenswrapper[4728]: I1205 11:32:33.901579 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="31a0b2ed-0629-4586-8bd2-3e728c70fc9b" containerName="ceilometer-central-agent" containerID="cri-o://6f3a5ec15671ac933c453e231526a3a388eeb3eab88b90bd48717bf87e5ecef0" gracePeriod=30 Dec 05 11:32:33 crc kubenswrapper[4728]: I1205 11:32:33.901702 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="31a0b2ed-0629-4586-8bd2-3e728c70fc9b" containerName="sg-core" containerID="cri-o://d9dc452492459336ebadafcd992302fdae5455d3b62f3589b393aed9623e22d9" gracePeriod=30 Dec 05 11:32:33 crc kubenswrapper[4728]: I1205 11:32:33.901804 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="31a0b2ed-0629-4586-8bd2-3e728c70fc9b" containerName="proxy-httpd" containerID="cri-o://f13dd0f545a7d70e7bba474314e4d59d4674f9cf5d06ef91561f7666b9d98798" gracePeriod=30 Dec 05 11:32:33 crc kubenswrapper[4728]: I1205 11:32:33.901712 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="31a0b2ed-0629-4586-8bd2-3e728c70fc9b" containerName="ceilometer-notification-agent" containerID="cri-o://54a84f68c95fd31e034471220d266be24c799795b5039ee92d4bff852d1749d4" gracePeriod=30 Dec 05 11:32:33 crc kubenswrapper[4728]: I1205 11:32:33.950144 4728 generic.go:334] "Generic (PLEG): container finished" podID="3e8c1db4-4dcb-4500-a280-5ce6e96f855d" containerID="ea5282681cef0c4d03fff4288f1c50f40a7ca295c169baa78eb35ad3b802ee63" exitCode=0 Dec 05 11:32:33 crc kubenswrapper[4728]: I1205 11:32:33.950207 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b4c997d87-lb4tw" event={"ID":"3e8c1db4-4dcb-4500-a280-5ce6e96f855d","Type":"ContainerDied","Data":"ea5282681cef0c4d03fff4288f1c50f40a7ca295c169baa78eb35ad3b802ee63"} Dec 05 11:32:33 crc kubenswrapper[4728]: I1205 11:32:33.950253 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b4c997d87-lb4tw" event={"ID":"3e8c1db4-4dcb-4500-a280-5ce6e96f855d","Type":"ContainerStarted","Data":"7cd464d620e0abb780fae3df1d00d2d13f50964443b21528d641b54e27b97220"} Dec 05 11:32:34 
crc kubenswrapper[4728]: I1205 11:32:34.006107 4728 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="31a0b2ed-0629-4586-8bd2-3e728c70fc9b" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.215:3000/\": read tcp 10.217.0.2:41758->10.217.0.215:3000: read: connection reset by peer" Dec 05 11:32:34 crc kubenswrapper[4728]: I1205 11:32:34.536541 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 11:32:34 crc kubenswrapper[4728]: I1205 11:32:34.613418 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 05 11:32:34 crc kubenswrapper[4728]: I1205 11:32:34.963749 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b4c997d87-lb4tw" event={"ID":"3e8c1db4-4dcb-4500-a280-5ce6e96f855d","Type":"ContainerStarted","Data":"da2273746e1babb155d448c6edf12ee13fbba3bf49be93725788d1ce3a1a55a3"} Dec 05 11:32:34 crc kubenswrapper[4728]: I1205 11:32:34.964946 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5b4c997d87-lb4tw" Dec 05 11:32:34 crc kubenswrapper[4728]: I1205 11:32:34.968320 4728 generic.go:334] "Generic (PLEG): container finished" podID="31a0b2ed-0629-4586-8bd2-3e728c70fc9b" containerID="f13dd0f545a7d70e7bba474314e4d59d4674f9cf5d06ef91561f7666b9d98798" exitCode=0 Dec 05 11:32:34 crc kubenswrapper[4728]: I1205 11:32:34.968348 4728 generic.go:334] "Generic (PLEG): container finished" podID="31a0b2ed-0629-4586-8bd2-3e728c70fc9b" containerID="d9dc452492459336ebadafcd992302fdae5455d3b62f3589b393aed9623e22d9" exitCode=2 Dec 05 11:32:34 crc kubenswrapper[4728]: I1205 11:32:34.968357 4728 generic.go:334] "Generic (PLEG): container finished" podID="31a0b2ed-0629-4586-8bd2-3e728c70fc9b" containerID="6f3a5ec15671ac933c453e231526a3a388eeb3eab88b90bd48717bf87e5ecef0" exitCode=0 Dec 05 11:32:34 crc kubenswrapper[4728]: I1205 11:32:34.968491 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="dc7732bf-7470-4243-9533-74f7432791bd" containerName="nova-api-log" containerID="cri-o://1bb602c7e5525145caed4b735c55e4bd7510c4f1cc2f611bd78414ca5509967c" gracePeriod=30 Dec 05 11:32:34 crc kubenswrapper[4728]: I1205 11:32:34.968679 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"31a0b2ed-0629-4586-8bd2-3e728c70fc9b","Type":"ContainerDied","Data":"f13dd0f545a7d70e7bba474314e4d59d4674f9cf5d06ef91561f7666b9d98798"} Dec 05 11:32:34 crc kubenswrapper[4728]: I1205 11:32:34.968707 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"31a0b2ed-0629-4586-8bd2-3e728c70fc9b","Type":"ContainerDied","Data":"d9dc452492459336ebadafcd992302fdae5455d3b62f3589b393aed9623e22d9"} Dec 05 11:32:34 crc kubenswrapper[4728]: I1205 11:32:34.968716 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"31a0b2ed-0629-4586-8bd2-3e728c70fc9b","Type":"ContainerDied","Data":"6f3a5ec15671ac933c453e231526a3a388eeb3eab88b90bd48717bf87e5ecef0"} Dec 05 11:32:34 crc kubenswrapper[4728]: I1205 11:32:34.968758 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="dc7732bf-7470-4243-9533-74f7432791bd" containerName="nova-api-api" containerID="cri-o://8bc04f097276751610c57aa8179f94d8e053b759519414068b4daa13930eeb62" gracePeriod=30 Dec 05 11:32:34 crc kubenswrapper[4728]: I1205 
11:32:34.992422 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5b4c997d87-lb4tw" podStartSLOduration=2.99239997 podStartE2EDuration="2.99239997s" podCreationTimestamp="2025-12-05 11:32:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:32:34.989755198 +0000 UTC m=+1489.131877891" watchObservedRunningTime="2025-12-05 11:32:34.99239997 +0000 UTC m=+1489.134522663" Dec 05 11:32:35 crc kubenswrapper[4728]: I1205 11:32:35.982080 4728 generic.go:334] "Generic (PLEG): container finished" podID="dc7732bf-7470-4243-9533-74f7432791bd" containerID="1bb602c7e5525145caed4b735c55e4bd7510c4f1cc2f611bd78414ca5509967c" exitCode=143 Dec 05 11:32:35 crc kubenswrapper[4728]: I1205 11:32:35.982241 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"dc7732bf-7470-4243-9533-74f7432791bd","Type":"ContainerDied","Data":"1bb602c7e5525145caed4b735c55e4bd7510c4f1cc2f611bd78414ca5509967c"} Dec 05 11:32:36 crc kubenswrapper[4728]: I1205 11:32:36.998094 4728 generic.go:334] "Generic (PLEG): container finished" podID="31a0b2ed-0629-4586-8bd2-3e728c70fc9b" containerID="54a84f68c95fd31e034471220d266be24c799795b5039ee92d4bff852d1749d4" exitCode=0 Dec 05 11:32:36 crc kubenswrapper[4728]: I1205 11:32:36.999129 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"31a0b2ed-0629-4586-8bd2-3e728c70fc9b","Type":"ContainerDied","Data":"54a84f68c95fd31e034471220d266be24c799795b5039ee92d4bff852d1749d4"} Dec 05 11:32:37 crc kubenswrapper[4728]: I1205 11:32:37.220771 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 11:32:37 crc kubenswrapper[4728]: I1205 11:32:37.302710 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31a0b2ed-0629-4586-8bd2-3e728c70fc9b-config-data\") pod \"31a0b2ed-0629-4586-8bd2-3e728c70fc9b\" (UID: \"31a0b2ed-0629-4586-8bd2-3e728c70fc9b\") " Dec 05 11:32:37 crc kubenswrapper[4728]: I1205 11:32:37.302814 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31a0b2ed-0629-4586-8bd2-3e728c70fc9b-combined-ca-bundle\") pod \"31a0b2ed-0629-4586-8bd2-3e728c70fc9b\" (UID: \"31a0b2ed-0629-4586-8bd2-3e728c70fc9b\") " Dec 05 11:32:37 crc kubenswrapper[4728]: I1205 11:32:37.302873 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/31a0b2ed-0629-4586-8bd2-3e728c70fc9b-sg-core-conf-yaml\") pod \"31a0b2ed-0629-4586-8bd2-3e728c70fc9b\" (UID: \"31a0b2ed-0629-4586-8bd2-3e728c70fc9b\") " Dec 05 11:32:37 crc kubenswrapper[4728]: I1205 11:32:37.302915 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/31a0b2ed-0629-4586-8bd2-3e728c70fc9b-ceilometer-tls-certs\") pod \"31a0b2ed-0629-4586-8bd2-3e728c70fc9b\" (UID: \"31a0b2ed-0629-4586-8bd2-3e728c70fc9b\") " Dec 05 11:32:37 crc kubenswrapper[4728]: I1205 11:32:37.302985 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rft6t\" (UniqueName: \"kubernetes.io/projected/31a0b2ed-0629-4586-8bd2-3e728c70fc9b-kube-api-access-rft6t\") pod \"31a0b2ed-0629-4586-8bd2-3e728c70fc9b\" (UID: 
\"31a0b2ed-0629-4586-8bd2-3e728c70fc9b\") " Dec 05 11:32:37 crc kubenswrapper[4728]: I1205 11:32:37.303038 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/31a0b2ed-0629-4586-8bd2-3e728c70fc9b-log-httpd\") pod \"31a0b2ed-0629-4586-8bd2-3e728c70fc9b\" (UID: \"31a0b2ed-0629-4586-8bd2-3e728c70fc9b\") " Dec 05 11:32:37 crc kubenswrapper[4728]: I1205 11:32:37.303077 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/31a0b2ed-0629-4586-8bd2-3e728c70fc9b-scripts\") pod \"31a0b2ed-0629-4586-8bd2-3e728c70fc9b\" (UID: \"31a0b2ed-0629-4586-8bd2-3e728c70fc9b\") " Dec 05 11:32:37 crc kubenswrapper[4728]: I1205 11:32:37.303110 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/31a0b2ed-0629-4586-8bd2-3e728c70fc9b-run-httpd\") pod \"31a0b2ed-0629-4586-8bd2-3e728c70fc9b\" (UID: \"31a0b2ed-0629-4586-8bd2-3e728c70fc9b\") " Dec 05 11:32:37 crc kubenswrapper[4728]: I1205 11:32:37.304027 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/31a0b2ed-0629-4586-8bd2-3e728c70fc9b-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "31a0b2ed-0629-4586-8bd2-3e728c70fc9b" (UID: "31a0b2ed-0629-4586-8bd2-3e728c70fc9b"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:32:37 crc kubenswrapper[4728]: I1205 11:32:37.304164 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/31a0b2ed-0629-4586-8bd2-3e728c70fc9b-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "31a0b2ed-0629-4586-8bd2-3e728c70fc9b" (UID: "31a0b2ed-0629-4586-8bd2-3e728c70fc9b"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:32:37 crc kubenswrapper[4728]: I1205 11:32:37.305300 4728 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/31a0b2ed-0629-4586-8bd2-3e728c70fc9b-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:37 crc kubenswrapper[4728]: I1205 11:32:37.305332 4728 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/31a0b2ed-0629-4586-8bd2-3e728c70fc9b-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:37 crc kubenswrapper[4728]: I1205 11:32:37.315514 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31a0b2ed-0629-4586-8bd2-3e728c70fc9b-scripts" (OuterVolumeSpecName: "scripts") pod "31a0b2ed-0629-4586-8bd2-3e728c70fc9b" (UID: "31a0b2ed-0629-4586-8bd2-3e728c70fc9b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:32:37 crc kubenswrapper[4728]: I1205 11:32:37.315714 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31a0b2ed-0629-4586-8bd2-3e728c70fc9b-kube-api-access-rft6t" (OuterVolumeSpecName: "kube-api-access-rft6t") pod "31a0b2ed-0629-4586-8bd2-3e728c70fc9b" (UID: "31a0b2ed-0629-4586-8bd2-3e728c70fc9b"). InnerVolumeSpecName "kube-api-access-rft6t". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:32:37 crc kubenswrapper[4728]: I1205 11:32:37.338764 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31a0b2ed-0629-4586-8bd2-3e728c70fc9b-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "31a0b2ed-0629-4586-8bd2-3e728c70fc9b" (UID: "31a0b2ed-0629-4586-8bd2-3e728c70fc9b"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:32:37 crc kubenswrapper[4728]: I1205 11:32:37.360467 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31a0b2ed-0629-4586-8bd2-3e728c70fc9b-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "31a0b2ed-0629-4586-8bd2-3e728c70fc9b" (UID: "31a0b2ed-0629-4586-8bd2-3e728c70fc9b"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:32:37 crc kubenswrapper[4728]: I1205 11:32:37.395624 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31a0b2ed-0629-4586-8bd2-3e728c70fc9b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "31a0b2ed-0629-4586-8bd2-3e728c70fc9b" (UID: "31a0b2ed-0629-4586-8bd2-3e728c70fc9b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:32:37 crc kubenswrapper[4728]: I1205 11:32:37.407500 4728 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31a0b2ed-0629-4586-8bd2-3e728c70fc9b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:37 crc kubenswrapper[4728]: I1205 11:32:37.407545 4728 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/31a0b2ed-0629-4586-8bd2-3e728c70fc9b-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:37 crc kubenswrapper[4728]: I1205 11:32:37.407557 4728 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/31a0b2ed-0629-4586-8bd2-3e728c70fc9b-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:37 crc kubenswrapper[4728]: I1205 11:32:37.407573 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rft6t\" (UniqueName: \"kubernetes.io/projected/31a0b2ed-0629-4586-8bd2-3e728c70fc9b-kube-api-access-rft6t\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:37 crc kubenswrapper[4728]: I1205 11:32:37.407586 4728 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/31a0b2ed-0629-4586-8bd2-3e728c70fc9b-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:37 crc kubenswrapper[4728]: I1205 11:32:37.408479 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31a0b2ed-0629-4586-8bd2-3e728c70fc9b-config-data" (OuterVolumeSpecName: "config-data") pod "31a0b2ed-0629-4586-8bd2-3e728c70fc9b" (UID: "31a0b2ed-0629-4586-8bd2-3e728c70fc9b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:32:37 crc kubenswrapper[4728]: I1205 11:32:37.509740 4728 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31a0b2ed-0629-4586-8bd2-3e728c70fc9b-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.012094 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"31a0b2ed-0629-4586-8bd2-3e728c70fc9b","Type":"ContainerDied","Data":"3c979536d8528584cf84c6cce7ae9068de7638b807d127e33ffdb7f894a506e5"} Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.012164 4728 scope.go:117] "RemoveContainer" containerID="f13dd0f545a7d70e7bba474314e4d59d4674f9cf5d06ef91561f7666b9d98798" Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.012363 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.054156 4728 scope.go:117] "RemoveContainer" containerID="d9dc452492459336ebadafcd992302fdae5455d3b62f3589b393aed9623e22d9" Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.077745 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.082679 4728 scope.go:117] "RemoveContainer" containerID="54a84f68c95fd31e034471220d266be24c799795b5039ee92d4bff852d1749d4" Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.085133 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.101484 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:32:38 crc kubenswrapper[4728]: E1205 11:32:38.102169 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31a0b2ed-0629-4586-8bd2-3e728c70fc9b" containerName="proxy-httpd" Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.102200 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="31a0b2ed-0629-4586-8bd2-3e728c70fc9b" containerName="proxy-httpd" Dec 05 11:32:38 crc kubenswrapper[4728]: E1205 11:32:38.102249 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31a0b2ed-0629-4586-8bd2-3e728c70fc9b" containerName="ceilometer-central-agent" Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.102262 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="31a0b2ed-0629-4586-8bd2-3e728c70fc9b" containerName="ceilometer-central-agent" Dec 05 11:32:38 crc kubenswrapper[4728]: E1205 11:32:38.102305 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31a0b2ed-0629-4586-8bd2-3e728c70fc9b" containerName="ceilometer-notification-agent" Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.102323 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="31a0b2ed-0629-4586-8bd2-3e728c70fc9b" containerName="ceilometer-notification-agent" Dec 05 11:32:38 crc kubenswrapper[4728]: E1205 11:32:38.102341 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31a0b2ed-0629-4586-8bd2-3e728c70fc9b" containerName="sg-core" Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.102352 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="31a0b2ed-0629-4586-8bd2-3e728c70fc9b" containerName="sg-core" Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.102712 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="31a0b2ed-0629-4586-8bd2-3e728c70fc9b" 
containerName="ceilometer-notification-agent" Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.102728 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="31a0b2ed-0629-4586-8bd2-3e728c70fc9b" containerName="proxy-httpd" Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.102769 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="31a0b2ed-0629-4586-8bd2-3e728c70fc9b" containerName="ceilometer-central-agent" Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.102862 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="31a0b2ed-0629-4586-8bd2-3e728c70fc9b" containerName="sg-core" Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.106704 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.109305 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.109591 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.112287 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.126206 4728 scope.go:117] "RemoveContainer" containerID="6f3a5ec15671ac933c453e231526a3a388eeb3eab88b90bd48717bf87e5ecef0" Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.127477 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.232630 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7d98205-ffa7-4388-8fff-66caf169466f-log-httpd\") pod \"ceilometer-0\" (UID: \"a7d98205-ffa7-4388-8fff-66caf169466f\") " pod="openstack/ceilometer-0" Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.232777 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a7d98205-ffa7-4388-8fff-66caf169466f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a7d98205-ffa7-4388-8fff-66caf169466f\") " pod="openstack/ceilometer-0" Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.232894 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xmfmg\" (UniqueName: \"kubernetes.io/projected/a7d98205-ffa7-4388-8fff-66caf169466f-kube-api-access-xmfmg\") pod \"ceilometer-0\" (UID: \"a7d98205-ffa7-4388-8fff-66caf169466f\") " pod="openstack/ceilometer-0" Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.232955 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a7d98205-ffa7-4388-8fff-66caf169466f-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"a7d98205-ffa7-4388-8fff-66caf169466f\") " pod="openstack/ceilometer-0" Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.233035 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7d98205-ffa7-4388-8fff-66caf169466f-scripts\") pod \"ceilometer-0\" (UID: \"a7d98205-ffa7-4388-8fff-66caf169466f\") " pod="openstack/ceilometer-0" Dec 05 11:32:38 crc 
kubenswrapper[4728]: I1205 11:32:38.233103 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7d98205-ffa7-4388-8fff-66caf169466f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a7d98205-ffa7-4388-8fff-66caf169466f\") " pod="openstack/ceilometer-0" Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.233152 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7d98205-ffa7-4388-8fff-66caf169466f-config-data\") pod \"ceilometer-0\" (UID: \"a7d98205-ffa7-4388-8fff-66caf169466f\") " pod="openstack/ceilometer-0" Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.233277 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7d98205-ffa7-4388-8fff-66caf169466f-run-httpd\") pod \"ceilometer-0\" (UID: \"a7d98205-ffa7-4388-8fff-66caf169466f\") " pod="openstack/ceilometer-0" Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.334881 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7d98205-ffa7-4388-8fff-66caf169466f-run-httpd\") pod \"ceilometer-0\" (UID: \"a7d98205-ffa7-4388-8fff-66caf169466f\") " pod="openstack/ceilometer-0" Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.335305 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7d98205-ffa7-4388-8fff-66caf169466f-log-httpd\") pod \"ceilometer-0\" (UID: \"a7d98205-ffa7-4388-8fff-66caf169466f\") " pod="openstack/ceilometer-0" Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.335389 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a7d98205-ffa7-4388-8fff-66caf169466f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a7d98205-ffa7-4388-8fff-66caf169466f\") " pod="openstack/ceilometer-0" Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.335434 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xmfmg\" (UniqueName: \"kubernetes.io/projected/a7d98205-ffa7-4388-8fff-66caf169466f-kube-api-access-xmfmg\") pod \"ceilometer-0\" (UID: \"a7d98205-ffa7-4388-8fff-66caf169466f\") " pod="openstack/ceilometer-0" Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.335464 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a7d98205-ffa7-4388-8fff-66caf169466f-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"a7d98205-ffa7-4388-8fff-66caf169466f\") " pod="openstack/ceilometer-0" Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.335528 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7d98205-ffa7-4388-8fff-66caf169466f-scripts\") pod \"ceilometer-0\" (UID: \"a7d98205-ffa7-4388-8fff-66caf169466f\") " pod="openstack/ceilometer-0" Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.335556 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7d98205-ffa7-4388-8fff-66caf169466f-run-httpd\") pod \"ceilometer-0\" (UID: \"a7d98205-ffa7-4388-8fff-66caf169466f\") " 
pod="openstack/ceilometer-0" Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.335587 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7d98205-ffa7-4388-8fff-66caf169466f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a7d98205-ffa7-4388-8fff-66caf169466f\") " pod="openstack/ceilometer-0" Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.335622 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7d98205-ffa7-4388-8fff-66caf169466f-config-data\") pod \"ceilometer-0\" (UID: \"a7d98205-ffa7-4388-8fff-66caf169466f\") " pod="openstack/ceilometer-0" Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.335775 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7d98205-ffa7-4388-8fff-66caf169466f-log-httpd\") pod \"ceilometer-0\" (UID: \"a7d98205-ffa7-4388-8fff-66caf169466f\") " pod="openstack/ceilometer-0" Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.344388 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7d98205-ffa7-4388-8fff-66caf169466f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a7d98205-ffa7-4388-8fff-66caf169466f\") " pod="openstack/ceilometer-0" Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.345489 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a7d98205-ffa7-4388-8fff-66caf169466f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a7d98205-ffa7-4388-8fff-66caf169466f\") " pod="openstack/ceilometer-0" Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.345538 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a7d98205-ffa7-4388-8fff-66caf169466f-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"a7d98205-ffa7-4388-8fff-66caf169466f\") " pod="openstack/ceilometer-0" Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.352503 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7d98205-ffa7-4388-8fff-66caf169466f-config-data\") pod \"ceilometer-0\" (UID: \"a7d98205-ffa7-4388-8fff-66caf169466f\") " pod="openstack/ceilometer-0" Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.354008 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xmfmg\" (UniqueName: \"kubernetes.io/projected/a7d98205-ffa7-4388-8fff-66caf169466f-kube-api-access-xmfmg\") pod \"ceilometer-0\" (UID: \"a7d98205-ffa7-4388-8fff-66caf169466f\") " pod="openstack/ceilometer-0" Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.358556 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7d98205-ffa7-4388-8fff-66caf169466f-scripts\") pod \"ceilometer-0\" (UID: \"a7d98205-ffa7-4388-8fff-66caf169466f\") " pod="openstack/ceilometer-0" Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.372701 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31a0b2ed-0629-4586-8bd2-3e728c70fc9b" path="/var/lib/kubelet/pods/31a0b2ed-0629-4586-8bd2-3e728c70fc9b/volumes" Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.447027 4728 util.go:30] "No sandbox for pod can be found. 
Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.511491 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.650741 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc7732bf-7470-4243-9533-74f7432791bd-config-data\") pod \"dc7732bf-7470-4243-9533-74f7432791bd\" (UID: \"dc7732bf-7470-4243-9533-74f7432791bd\") "
Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.651035 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rd22g\" (UniqueName: \"kubernetes.io/projected/dc7732bf-7470-4243-9533-74f7432791bd-kube-api-access-rd22g\") pod \"dc7732bf-7470-4243-9533-74f7432791bd\" (UID: \"dc7732bf-7470-4243-9533-74f7432791bd\") "
Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.651125 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc7732bf-7470-4243-9533-74f7432791bd-combined-ca-bundle\") pod \"dc7732bf-7470-4243-9533-74f7432791bd\" (UID: \"dc7732bf-7470-4243-9533-74f7432791bd\") "
Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.651217 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc7732bf-7470-4243-9533-74f7432791bd-logs\") pod \"dc7732bf-7470-4243-9533-74f7432791bd\" (UID: \"dc7732bf-7470-4243-9533-74f7432791bd\") "
Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.651902 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dc7732bf-7470-4243-9533-74f7432791bd-logs" (OuterVolumeSpecName: "logs") pod "dc7732bf-7470-4243-9533-74f7432791bd" (UID: "dc7732bf-7470-4243-9533-74f7432791bd"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.656013 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc7732bf-7470-4243-9533-74f7432791bd-kube-api-access-rd22g" (OuterVolumeSpecName: "kube-api-access-rd22g") pod "dc7732bf-7470-4243-9533-74f7432791bd" (UID: "dc7732bf-7470-4243-9533-74f7432791bd"). InnerVolumeSpecName "kube-api-access-rd22g". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.680358 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc7732bf-7470-4243-9533-74f7432791bd-config-data" (OuterVolumeSpecName: "config-data") pod "dc7732bf-7470-4243-9533-74f7432791bd" (UID: "dc7732bf-7470-4243-9533-74f7432791bd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.685680 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc7732bf-7470-4243-9533-74f7432791bd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dc7732bf-7470-4243-9533-74f7432791bd" (UID: "dc7732bf-7470-4243-9533-74f7432791bd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.753573 4728 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc7732bf-7470-4243-9533-74f7432791bd-logs\") on node \"crc\" DevicePath \"\""
Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.753608 4728 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc7732bf-7470-4243-9533-74f7432791bd-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.753618 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rd22g\" (UniqueName: \"kubernetes.io/projected/dc7732bf-7470-4243-9533-74f7432791bd-kube-api-access-rd22g\") on node \"crc\" DevicePath \"\""
Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.753627 4728 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc7732bf-7470-4243-9533-74f7432791bd-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 11:32:38 crc kubenswrapper[4728]: I1205 11:32:38.994643 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 11:32:39 crc kubenswrapper[4728]: I1205 11:32:39.026525 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a7d98205-ffa7-4388-8fff-66caf169466f","Type":"ContainerStarted","Data":"4e00e2e646bcf7294f53d5a535c22934076a3c92b05dbbb52db1347c77c69294"}
Dec 05 11:32:39 crc kubenswrapper[4728]: I1205 11:32:39.030119 4728 generic.go:334] "Generic (PLEG): container finished" podID="dc7732bf-7470-4243-9533-74f7432791bd" containerID="8bc04f097276751610c57aa8179f94d8e053b759519414068b4daa13930eeb62" exitCode=0
Dec 05 11:32:39 crc kubenswrapper[4728]: I1205 11:32:39.030455 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 05 11:32:39 crc kubenswrapper[4728]: I1205 11:32:39.030359 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"dc7732bf-7470-4243-9533-74f7432791bd","Type":"ContainerDied","Data":"8bc04f097276751610c57aa8179f94d8e053b759519414068b4daa13930eeb62"}
Dec 05 11:32:39 crc kubenswrapper[4728]: I1205 11:32:39.030584 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"dc7732bf-7470-4243-9533-74f7432791bd","Type":"ContainerDied","Data":"88fa5c91725a9c24d38f311307fda0d5e5716019e8cea9bf8491c755a3742548"}
Dec 05 11:32:39 crc kubenswrapper[4728]: I1205 11:32:39.030610 4728 scope.go:117] "RemoveContainer" containerID="8bc04f097276751610c57aa8179f94d8e053b759519414068b4daa13930eeb62"
Dec 05 11:32:39 crc kubenswrapper[4728]: I1205 11:32:39.081434 4728 scope.go:117] "RemoveContainer" containerID="1bb602c7e5525145caed4b735c55e4bd7510c4f1cc2f611bd78414ca5509967c"
Dec 05 11:32:39 crc kubenswrapper[4728]: I1205 11:32:39.098686 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Dec 05 11:32:39 crc kubenswrapper[4728]: I1205 11:32:39.109163 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"]
Dec 05 11:32:39 crc kubenswrapper[4728]: I1205 11:32:39.113885 4728 scope.go:117] "RemoveContainer" containerID="8bc04f097276751610c57aa8179f94d8e053b759519414068b4daa13930eeb62"
Dec 05 11:32:39 crc kubenswrapper[4728]: E1205 11:32:39.114761 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8bc04f097276751610c57aa8179f94d8e053b759519414068b4daa13930eeb62\": container with ID starting with 8bc04f097276751610c57aa8179f94d8e053b759519414068b4daa13930eeb62 not found: ID does not exist" containerID="8bc04f097276751610c57aa8179f94d8e053b759519414068b4daa13930eeb62"
Dec 05 11:32:39 crc kubenswrapper[4728]: I1205 11:32:39.114830 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8bc04f097276751610c57aa8179f94d8e053b759519414068b4daa13930eeb62"} err="failed to get container status \"8bc04f097276751610c57aa8179f94d8e053b759519414068b4daa13930eeb62\": rpc error: code = NotFound desc = could not find container \"8bc04f097276751610c57aa8179f94d8e053b759519414068b4daa13930eeb62\": container with ID starting with 8bc04f097276751610c57aa8179f94d8e053b759519414068b4daa13930eeb62 not found: ID does not exist"
Dec 05 11:32:39 crc kubenswrapper[4728]: I1205 11:32:39.114862 4728 scope.go:117] "RemoveContainer" containerID="1bb602c7e5525145caed4b735c55e4bd7510c4f1cc2f611bd78414ca5509967c"
Dec 05 11:32:39 crc kubenswrapper[4728]: E1205 11:32:39.115298 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1bb602c7e5525145caed4b735c55e4bd7510c4f1cc2f611bd78414ca5509967c\": container with ID starting with 1bb602c7e5525145caed4b735c55e4bd7510c4f1cc2f611bd78414ca5509967c not found: ID does not exist" containerID="1bb602c7e5525145caed4b735c55e4bd7510c4f1cc2f611bd78414ca5509967c"
Dec 05 11:32:39 crc kubenswrapper[4728]: I1205 11:32:39.115339 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1bb602c7e5525145caed4b735c55e4bd7510c4f1cc2f611bd78414ca5509967c"} err="failed to get container status \"1bb602c7e5525145caed4b735c55e4bd7510c4f1cc2f611bd78414ca5509967c\": rpc error: code = NotFound desc = could not find container \"1bb602c7e5525145caed4b735c55e4bd7510c4f1cc2f611bd78414ca5509967c\": container with ID starting with 1bb602c7e5525145caed4b735c55e4bd7510c4f1cc2f611bd78414ca5509967c not found: ID does not exist"
Dec 05 11:32:39 crc kubenswrapper[4728]: I1205 11:32:39.122665 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Dec 05 11:32:39 crc kubenswrapper[4728]: E1205 11:32:39.123142 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc7732bf-7470-4243-9533-74f7432791bd" containerName="nova-api-log"
Dec 05 11:32:39 crc kubenswrapper[4728]: I1205 11:32:39.123158 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc7732bf-7470-4243-9533-74f7432791bd" containerName="nova-api-log"
Dec 05 11:32:39 crc kubenswrapper[4728]: E1205 11:32:39.123178 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc7732bf-7470-4243-9533-74f7432791bd" containerName="nova-api-api"
Dec 05 11:32:39 crc kubenswrapper[4728]: I1205 11:32:39.123184 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc7732bf-7470-4243-9533-74f7432791bd" containerName="nova-api-api"
Dec 05 11:32:39 crc kubenswrapper[4728]: I1205 11:32:39.123371 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc7732bf-7470-4243-9533-74f7432791bd" containerName="nova-api-api"
Dec 05 11:32:39 crc kubenswrapper[4728]: I1205 11:32:39.123393 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc7732bf-7470-4243-9533-74f7432791bd" containerName="nova-api-log"
Dec 05 11:32:39 crc kubenswrapper[4728]: I1205 11:32:39.124544 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 05 11:32:39 crc kubenswrapper[4728]: I1205 11:32:39.127261 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Dec 05 11:32:39 crc kubenswrapper[4728]: I1205 11:32:39.127488 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc"
Dec 05 11:32:39 crc kubenswrapper[4728]: I1205 11:32:39.127621 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc"
Dec 05 11:32:39 crc kubenswrapper[4728]: I1205 11:32:39.135472 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Dec 05 11:32:39 crc kubenswrapper[4728]: I1205 11:32:39.262985 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/485fec75-8b78-42b3-a6b8-dde110718b6c-logs\") pod \"nova-api-0\" (UID: \"485fec75-8b78-42b3-a6b8-dde110718b6c\") " pod="openstack/nova-api-0"
Dec 05 11:32:39 crc kubenswrapper[4728]: I1205 11:32:39.263054 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-thbt7\" (UniqueName: \"kubernetes.io/projected/485fec75-8b78-42b3-a6b8-dde110718b6c-kube-api-access-thbt7\") pod \"nova-api-0\" (UID: \"485fec75-8b78-42b3-a6b8-dde110718b6c\") " pod="openstack/nova-api-0"
Dec 05 11:32:39 crc kubenswrapper[4728]: I1205 11:32:39.263122 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/485fec75-8b78-42b3-a6b8-dde110718b6c-internal-tls-certs\") pod \"nova-api-0\" (UID: \"485fec75-8b78-42b3-a6b8-dde110718b6c\") " pod="openstack/nova-api-0"
Dec 05 11:32:39 crc kubenswrapper[4728]: I1205 11:32:39.263192 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/485fec75-8b78-42b3-a6b8-dde110718b6c-public-tls-certs\") pod \"nova-api-0\" (UID: \"485fec75-8b78-42b3-a6b8-dde110718b6c\") " pod="openstack/nova-api-0"
Dec 05 11:32:39 crc kubenswrapper[4728]: I1205 11:32:39.263261 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/485fec75-8b78-42b3-a6b8-dde110718b6c-config-data\") pod \"nova-api-0\" (UID: \"485fec75-8b78-42b3-a6b8-dde110718b6c\") " pod="openstack/nova-api-0"
Dec 05 11:32:39 crc kubenswrapper[4728]: I1205 11:32:39.263307 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/485fec75-8b78-42b3-a6b8-dde110718b6c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"485fec75-8b78-42b3-a6b8-dde110718b6c\") " pod="openstack/nova-api-0"
Dec 05 11:32:39 crc kubenswrapper[4728]: I1205 11:32:39.365032 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/485fec75-8b78-42b3-a6b8-dde110718b6c-config-data\") pod \"nova-api-0\" (UID: \"485fec75-8b78-42b3-a6b8-dde110718b6c\") " pod="openstack/nova-api-0"
Dec 05 11:32:39 crc kubenswrapper[4728]: I1205 11:32:39.365934 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/485fec75-8b78-42b3-a6b8-dde110718b6c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"485fec75-8b78-42b3-a6b8-dde110718b6c\") " pod="openstack/nova-api-0"
Dec 05 11:32:39 crc kubenswrapper[4728]: I1205 11:32:39.365973 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/485fec75-8b78-42b3-a6b8-dde110718b6c-logs\") pod \"nova-api-0\" (UID: \"485fec75-8b78-42b3-a6b8-dde110718b6c\") " pod="openstack/nova-api-0"
Dec 05 11:32:39 crc kubenswrapper[4728]: I1205 11:32:39.366009 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-thbt7\" (UniqueName: \"kubernetes.io/projected/485fec75-8b78-42b3-a6b8-dde110718b6c-kube-api-access-thbt7\") pod \"nova-api-0\" (UID: \"485fec75-8b78-42b3-a6b8-dde110718b6c\") " pod="openstack/nova-api-0"
Dec 05 11:32:39 crc kubenswrapper[4728]: I1205 11:32:39.366079 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/485fec75-8b78-42b3-a6b8-dde110718b6c-internal-tls-certs\") pod \"nova-api-0\" (UID: \"485fec75-8b78-42b3-a6b8-dde110718b6c\") " pod="openstack/nova-api-0"
Dec 05 11:32:39 crc kubenswrapper[4728]: I1205 11:32:39.366417 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/485fec75-8b78-42b3-a6b8-dde110718b6c-public-tls-certs\") pod \"nova-api-0\" (UID: \"485fec75-8b78-42b3-a6b8-dde110718b6c\") " pod="openstack/nova-api-0"
Dec 05 11:32:39 crc kubenswrapper[4728]: I1205 11:32:39.366497 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/485fec75-8b78-42b3-a6b8-dde110718b6c-logs\") pod \"nova-api-0\" (UID: \"485fec75-8b78-42b3-a6b8-dde110718b6c\") " pod="openstack/nova-api-0"
Dec 05 11:32:39 crc kubenswrapper[4728]: I1205 11:32:39.369917 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/485fec75-8b78-42b3-a6b8-dde110718b6c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"485fec75-8b78-42b3-a6b8-dde110718b6c\") " pod="openstack/nova-api-0"
Dec 05 11:32:39 crc kubenswrapper[4728]: I1205 11:32:39.369944 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/485fec75-8b78-42b3-a6b8-dde110718b6c-internal-tls-certs\") pod \"nova-api-0\" (UID: \"485fec75-8b78-42b3-a6b8-dde110718b6c\") " pod="openstack/nova-api-0"
Dec 05 11:32:39 crc kubenswrapper[4728]: I1205 11:32:39.370294 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/485fec75-8b78-42b3-a6b8-dde110718b6c-public-tls-certs\") pod \"nova-api-0\" (UID: \"485fec75-8b78-42b3-a6b8-dde110718b6c\") " pod="openstack/nova-api-0"
Dec 05 11:32:39 crc kubenswrapper[4728]: I1205 11:32:39.378361 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/485fec75-8b78-42b3-a6b8-dde110718b6c-config-data\") pod \"nova-api-0\" (UID: \"485fec75-8b78-42b3-a6b8-dde110718b6c\") " pod="openstack/nova-api-0"
Dec 05 11:32:39 crc kubenswrapper[4728]: I1205 11:32:39.389428 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-thbt7\" (UniqueName: \"kubernetes.io/projected/485fec75-8b78-42b3-a6b8-dde110718b6c-kube-api-access-thbt7\") pod \"nova-api-0\" (UID: \"485fec75-8b78-42b3-a6b8-dde110718b6c\") " pod="openstack/nova-api-0"
Dec 05 11:32:39 crc kubenswrapper[4728]: I1205 11:32:39.447844 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 05 11:32:39 crc kubenswrapper[4728]: I1205 11:32:39.615234 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0"
Dec 05 11:32:39 crc kubenswrapper[4728]: I1205 11:32:39.643291 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0"
Dec 05 11:32:39 crc kubenswrapper[4728]: I1205 11:32:39.919626 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Dec 05 11:32:40 crc kubenswrapper[4728]: I1205 11:32:40.047757 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a7d98205-ffa7-4388-8fff-66caf169466f","Type":"ContainerStarted","Data":"854f256359700ffb74487cfd23c4ab58cccbf839007299b8fe21e5fb04ac704e"}
Dec 05 11:32:40 crc kubenswrapper[4728]: I1205 11:32:40.059503 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"485fec75-8b78-42b3-a6b8-dde110718b6c","Type":"ContainerStarted","Data":"80f3a398da2230c662fbe3d12216f5f61887a2659a68e3e0ac7ffa017ee014a4"}
Dec 05 11:32:40 crc kubenswrapper[4728]: I1205 11:32:40.083518 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0"
Dec 05 11:32:40 crc kubenswrapper[4728]: I1205 11:32:40.299029 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-xwznt"]
Dec 05 11:32:40 crc kubenswrapper[4728]: I1205 11:32:40.301140 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-xwznt"
Dec 05 11:32:40 crc kubenswrapper[4728]: I1205 11:32:40.303385 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data"
Dec 05 11:32:40 crc kubenswrapper[4728]: I1205 11:32:40.303718 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts"
Dec 05 11:32:40 crc kubenswrapper[4728]: I1205 11:32:40.319711 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-xwznt"]
Dec 05 11:32:40 crc kubenswrapper[4728]: I1205 11:32:40.363890 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dc7732bf-7470-4243-9533-74f7432791bd" path="/var/lib/kubelet/pods/dc7732bf-7470-4243-9533-74f7432791bd/volumes"
Dec 05 11:32:40 crc kubenswrapper[4728]: I1205 11:32:40.397635 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a0b71e7d-96b0-4ce9-bbb4-bc168495b082-scripts\") pod \"nova-cell1-cell-mapping-xwznt\" (UID: \"a0b71e7d-96b0-4ce9-bbb4-bc168495b082\") " pod="openstack/nova-cell1-cell-mapping-xwznt"
Dec 05 11:32:40 crc kubenswrapper[4728]: I1205 11:32:40.397679 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0b71e7d-96b0-4ce9-bbb4-bc168495b082-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-xwznt\" (UID: \"a0b71e7d-96b0-4ce9-bbb4-bc168495b082\") " pod="openstack/nova-cell1-cell-mapping-xwznt"
Dec 05 11:32:40 crc kubenswrapper[4728]: I1205 11:32:40.397711 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a0b71e7d-96b0-4ce9-bbb4-bc168495b082-config-data\") pod \"nova-cell1-cell-mapping-xwznt\" (UID: \"a0b71e7d-96b0-4ce9-bbb4-bc168495b082\") " pod="openstack/nova-cell1-cell-mapping-xwznt"
Dec 05 11:32:40 crc kubenswrapper[4728]: I1205 11:32:40.397734 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c7c5z\" (UniqueName: \"kubernetes.io/projected/a0b71e7d-96b0-4ce9-bbb4-bc168495b082-kube-api-access-c7c5z\") pod \"nova-cell1-cell-mapping-xwznt\" (UID: \"a0b71e7d-96b0-4ce9-bbb4-bc168495b082\") " pod="openstack/nova-cell1-cell-mapping-xwznt"
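[Editor's aside] Every volume of the cell-mapping job pod is secret-backed (the kubernetes.io/secret and kubernetes.io/projected plugins above), and the reflector lines show the kubelet warming its Secret caches (nova-cell1-manage-config-data, nova-cell1-manage-scripts) before mounting. A sketch of the pod shape this implies, built with the standard k8s.io/api types; the combined-ca-bundle Secret name is a guess, not taken from the log, and the module dependencies (k8s.io/api, k8s.io/apimachinery) must be fetched first:

package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)

// cellMappingPod builds a pod with the secret-backed volumes the log shows.
func cellMappingPod() *corev1.Pod {
	secretVol := func(name, secret string) corev1.Volume {
		return corev1.Volume{
			Name: name,
			VolumeSource: corev1.VolumeSource{
				Secret: &corev1.SecretVolumeSource{SecretName: secret},
			},
		}
	}
	return &corev1.Pod{
		ObjectMeta: metav1.ObjectMeta{Name: "nova-cell1-cell-mapping-xwznt", Namespace: "openstack"},
		Spec: corev1.PodSpec{
			Volumes: []corev1.Volume{
				secretVol("scripts", "nova-cell1-manage-scripts"),
				secretVol("config-data", "nova-cell1-manage-config-data"),
				secretVol("combined-ca-bundle", "combined-ca-bundle"), // assumed Secret name
			},
		},
	}
}

func main() { fmt.Println(cellMappingPod().Name) }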
\"a0b71e7d-96b0-4ce9-bbb4-bc168495b082\") " pod="openstack/nova-cell1-cell-mapping-xwznt" Dec 05 11:32:40 crc kubenswrapper[4728]: I1205 11:32:40.499343 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c7c5z\" (UniqueName: \"kubernetes.io/projected/a0b71e7d-96b0-4ce9-bbb4-bc168495b082-kube-api-access-c7c5z\") pod \"nova-cell1-cell-mapping-xwznt\" (UID: \"a0b71e7d-96b0-4ce9-bbb4-bc168495b082\") " pod="openstack/nova-cell1-cell-mapping-xwznt" Dec 05 11:32:40 crc kubenswrapper[4728]: I1205 11:32:40.504371 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a0b71e7d-96b0-4ce9-bbb4-bc168495b082-scripts\") pod \"nova-cell1-cell-mapping-xwznt\" (UID: \"a0b71e7d-96b0-4ce9-bbb4-bc168495b082\") " pod="openstack/nova-cell1-cell-mapping-xwznt" Dec 05 11:32:40 crc kubenswrapper[4728]: I1205 11:32:40.505471 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0b71e7d-96b0-4ce9-bbb4-bc168495b082-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-xwznt\" (UID: \"a0b71e7d-96b0-4ce9-bbb4-bc168495b082\") " pod="openstack/nova-cell1-cell-mapping-xwznt" Dec 05 11:32:40 crc kubenswrapper[4728]: I1205 11:32:40.506427 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a0b71e7d-96b0-4ce9-bbb4-bc168495b082-config-data\") pod \"nova-cell1-cell-mapping-xwznt\" (UID: \"a0b71e7d-96b0-4ce9-bbb4-bc168495b082\") " pod="openstack/nova-cell1-cell-mapping-xwznt" Dec 05 11:32:40 crc kubenswrapper[4728]: I1205 11:32:40.517349 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c7c5z\" (UniqueName: \"kubernetes.io/projected/a0b71e7d-96b0-4ce9-bbb4-bc168495b082-kube-api-access-c7c5z\") pod \"nova-cell1-cell-mapping-xwznt\" (UID: \"a0b71e7d-96b0-4ce9-bbb4-bc168495b082\") " pod="openstack/nova-cell1-cell-mapping-xwznt" Dec 05 11:32:40 crc kubenswrapper[4728]: I1205 11:32:40.635846 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-xwznt" Dec 05 11:32:41 crc kubenswrapper[4728]: I1205 11:32:41.083453 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a7d98205-ffa7-4388-8fff-66caf169466f","Type":"ContainerStarted","Data":"cea86657b75bf4647293ab1beeed9431182748a34842d85a91b13d4ee513b0cb"} Dec 05 11:32:41 crc kubenswrapper[4728]: I1205 11:32:41.093863 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"485fec75-8b78-42b3-a6b8-dde110718b6c","Type":"ContainerStarted","Data":"f649fafe101a00b5fda464c4333cdc54c78e32d9240804f2a9502ff5472340e2"} Dec 05 11:32:41 crc kubenswrapper[4728]: I1205 11:32:41.094036 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"485fec75-8b78-42b3-a6b8-dde110718b6c","Type":"ContainerStarted","Data":"c2a5cc41c30e97d9bb8c06db8bb72c71116740daa394fdafc6bf08f6cf4453e3"} Dec 05 11:32:41 crc kubenswrapper[4728]: I1205 11:32:41.118734 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.118714712 podStartE2EDuration="2.118714712s" podCreationTimestamp="2025-12-05 11:32:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:32:41.112333831 +0000 UTC m=+1495.254456534" watchObservedRunningTime="2025-12-05 11:32:41.118714712 +0000 UTC m=+1495.260837415" Dec 05 11:32:41 crc kubenswrapper[4728]: I1205 11:32:41.201879 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-xwznt"] Dec 05 11:32:41 crc kubenswrapper[4728]: W1205 11:32:41.238952 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda0b71e7d_96b0_4ce9_bbb4_bc168495b082.slice/crio-1d114962278e3ba99919b2f17ed714dda9d467718f72bfeb2aff50b83edb5c17 WatchSource:0}: Error finding container 1d114962278e3ba99919b2f17ed714dda9d467718f72bfeb2aff50b83edb5c17: Status 404 returned error can't find the container with id 1d114962278e3ba99919b2f17ed714dda9d467718f72bfeb2aff50b83edb5c17 Dec 05 11:32:42 crc kubenswrapper[4728]: I1205 11:32:42.108501 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a7d98205-ffa7-4388-8fff-66caf169466f","Type":"ContainerStarted","Data":"a9aef9b67a6b08062eb5b902bb694afe1f730ee221b83579f052902c1c62a846"} Dec 05 11:32:42 crc kubenswrapper[4728]: I1205 11:32:42.111417 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-xwznt" event={"ID":"a0b71e7d-96b0-4ce9-bbb4-bc168495b082","Type":"ContainerStarted","Data":"2662f7d0df1631a589d833aac1575bcf135fc313919f4dcb1edde8a55a803594"} Dec 05 11:32:42 crc kubenswrapper[4728]: I1205 11:32:42.111482 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-xwznt" event={"ID":"a0b71e7d-96b0-4ce9-bbb4-bc168495b082","Type":"ContainerStarted","Data":"1d114962278e3ba99919b2f17ed714dda9d467718f72bfeb2aff50b83edb5c17"} Dec 05 11:32:42 crc kubenswrapper[4728]: I1205 11:32:42.132080 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-xwznt" podStartSLOduration=2.132063948 podStartE2EDuration="2.132063948s" podCreationTimestamp="2025-12-05 11:32:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 
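[Editor's aside] The pod_startup_latency_tracker entries report both podStartSLOduration and podStartE2EDuration. For nova-api-0 the two are equal (2.118714712s = watchObservedRunningTime 11:32:41.118714712 minus podCreationTimestamp 11:32:39) because no image pull happened: the pulling timestamps are the zero time. When a pull does happen, the SLO duration appears to exclude it; the ceilometer-0 entry further down is consistent with 8.234921693s end to end minus the time between firstStartedPulling and lastFinishedPulling (taken from the monotonic m= readings) leaving about 1.8722s. A quick check of that reading in Go, under the assumption just stated:

package main

import "fmt"

func main() {
	// ceilometer-0, from its tracker entry below (all values in seconds):
	e2e := 8.234921693                      // watchObservedRunningTime - podCreationTimestamp
	pull := 1499.507057284 - 1493.144335183 // lastFinishedPulling - firstStartedPulling (monotonic m= values)
	fmt.Printf("podStartSLOduration ~= %.9f\n", e2e-pull) // ~1.872199592, matching the log
}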
UTC" observedRunningTime="2025-12-05 11:32:42.128382925 +0000 UTC m=+1496.270505668" watchObservedRunningTime="2025-12-05 11:32:42.132063948 +0000 UTC m=+1496.274186641" Dec 05 11:32:42 crc kubenswrapper[4728]: I1205 11:32:42.521681 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5b4c997d87-lb4tw" Dec 05 11:32:42 crc kubenswrapper[4728]: I1205 11:32:42.626327 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6b6c754dc9-czxc8"] Dec 05 11:32:42 crc kubenswrapper[4728]: I1205 11:32:42.626886 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6b6c754dc9-czxc8" podUID="dfa85807-524b-4fa7-9cf4-b05a8b659c71" containerName="dnsmasq-dns" containerID="cri-o://71f9bc9cb7917f02b5226e6794bf3ebca9f83c660b2e9133b2c650dbdf71af50" gracePeriod=10 Dec 05 11:32:43 crc kubenswrapper[4728]: I1205 11:32:43.121144 4728 generic.go:334] "Generic (PLEG): container finished" podID="dfa85807-524b-4fa7-9cf4-b05a8b659c71" containerID="71f9bc9cb7917f02b5226e6794bf3ebca9f83c660b2e9133b2c650dbdf71af50" exitCode=0 Dec 05 11:32:43 crc kubenswrapper[4728]: I1205 11:32:43.122310 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b6c754dc9-czxc8" event={"ID":"dfa85807-524b-4fa7-9cf4-b05a8b659c71","Type":"ContainerDied","Data":"71f9bc9cb7917f02b5226e6794bf3ebca9f83c660b2e9133b2c650dbdf71af50"} Dec 05 11:32:43 crc kubenswrapper[4728]: I1205 11:32:43.122334 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b6c754dc9-czxc8" event={"ID":"dfa85807-524b-4fa7-9cf4-b05a8b659c71","Type":"ContainerDied","Data":"d449bfde56058a5fcb7c7f7cc55ebeacd1018606f5c11cd293ae58e6da851fbe"} Dec 05 11:32:43 crc kubenswrapper[4728]: I1205 11:32:43.122346 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d449bfde56058a5fcb7c7f7cc55ebeacd1018606f5c11cd293ae58e6da851fbe" Dec 05 11:32:43 crc kubenswrapper[4728]: I1205 11:32:43.161867 4728 util.go:48] "No ready sandbox for pod can be found. 
Dec 05 11:32:43 crc kubenswrapper[4728]: I1205 11:32:43.269802 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dfa85807-524b-4fa7-9cf4-b05a8b659c71-ovsdbserver-sb\") pod \"dfa85807-524b-4fa7-9cf4-b05a8b659c71\" (UID: \"dfa85807-524b-4fa7-9cf4-b05a8b659c71\") "
Dec 05 11:32:43 crc kubenswrapper[4728]: I1205 11:32:43.269881 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dfa85807-524b-4fa7-9cf4-b05a8b659c71-dns-svc\") pod \"dfa85807-524b-4fa7-9cf4-b05a8b659c71\" (UID: \"dfa85807-524b-4fa7-9cf4-b05a8b659c71\") "
Dec 05 11:32:43 crc kubenswrapper[4728]: I1205 11:32:43.269942 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b84xr\" (UniqueName: \"kubernetes.io/projected/dfa85807-524b-4fa7-9cf4-b05a8b659c71-kube-api-access-b84xr\") pod \"dfa85807-524b-4fa7-9cf4-b05a8b659c71\" (UID: \"dfa85807-524b-4fa7-9cf4-b05a8b659c71\") "
Dec 05 11:32:43 crc kubenswrapper[4728]: I1205 11:32:43.269973 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dfa85807-524b-4fa7-9cf4-b05a8b659c71-ovsdbserver-nb\") pod \"dfa85807-524b-4fa7-9cf4-b05a8b659c71\" (UID: \"dfa85807-524b-4fa7-9cf4-b05a8b659c71\") "
Dec 05 11:32:43 crc kubenswrapper[4728]: I1205 11:32:43.270003 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/dfa85807-524b-4fa7-9cf4-b05a8b659c71-dns-swift-storage-0\") pod \"dfa85807-524b-4fa7-9cf4-b05a8b659c71\" (UID: \"dfa85807-524b-4fa7-9cf4-b05a8b659c71\") "
Dec 05 11:32:43 crc kubenswrapper[4728]: I1205 11:32:43.270031 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dfa85807-524b-4fa7-9cf4-b05a8b659c71-config\") pod \"dfa85807-524b-4fa7-9cf4-b05a8b659c71\" (UID: \"dfa85807-524b-4fa7-9cf4-b05a8b659c71\") "
Dec 05 11:32:43 crc kubenswrapper[4728]: I1205 11:32:43.291047 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dfa85807-524b-4fa7-9cf4-b05a8b659c71-kube-api-access-b84xr" (OuterVolumeSpecName: "kube-api-access-b84xr") pod "dfa85807-524b-4fa7-9cf4-b05a8b659c71" (UID: "dfa85807-524b-4fa7-9cf4-b05a8b659c71"). InnerVolumeSpecName "kube-api-access-b84xr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:32:43 crc kubenswrapper[4728]: I1205 11:32:43.324331 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dfa85807-524b-4fa7-9cf4-b05a8b659c71-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "dfa85807-524b-4fa7-9cf4-b05a8b659c71" (UID: "dfa85807-524b-4fa7-9cf4-b05a8b659c71"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:32:43 crc kubenswrapper[4728]: I1205 11:32:43.335985 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dfa85807-524b-4fa7-9cf4-b05a8b659c71-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "dfa85807-524b-4fa7-9cf4-b05a8b659c71" (UID: "dfa85807-524b-4fa7-9cf4-b05a8b659c71"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:32:43 crc kubenswrapper[4728]: I1205 11:32:43.336032 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dfa85807-524b-4fa7-9cf4-b05a8b659c71-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "dfa85807-524b-4fa7-9cf4-b05a8b659c71" (UID: "dfa85807-524b-4fa7-9cf4-b05a8b659c71"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:32:43 crc kubenswrapper[4728]: I1205 11:32:43.336717 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dfa85807-524b-4fa7-9cf4-b05a8b659c71-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "dfa85807-524b-4fa7-9cf4-b05a8b659c71" (UID: "dfa85807-524b-4fa7-9cf4-b05a8b659c71"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:32:43 crc kubenswrapper[4728]: I1205 11:32:43.342385 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dfa85807-524b-4fa7-9cf4-b05a8b659c71-config" (OuterVolumeSpecName: "config") pod "dfa85807-524b-4fa7-9cf4-b05a8b659c71" (UID: "dfa85807-524b-4fa7-9cf4-b05a8b659c71"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:32:43 crc kubenswrapper[4728]: I1205 11:32:43.371987 4728 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dfa85807-524b-4fa7-9cf4-b05a8b659c71-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Dec 05 11:32:43 crc kubenswrapper[4728]: I1205 11:32:43.372022 4728 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dfa85807-524b-4fa7-9cf4-b05a8b659c71-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 05 11:32:43 crc kubenswrapper[4728]: I1205 11:32:43.372033 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b84xr\" (UniqueName: \"kubernetes.io/projected/dfa85807-524b-4fa7-9cf4-b05a8b659c71-kube-api-access-b84xr\") on node \"crc\" DevicePath \"\""
Dec 05 11:32:43 crc kubenswrapper[4728]: I1205 11:32:43.372044 4728 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dfa85807-524b-4fa7-9cf4-b05a8b659c71-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Dec 05 11:32:43 crc kubenswrapper[4728]: I1205 11:32:43.372053 4728 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/dfa85807-524b-4fa7-9cf4-b05a8b659c71-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Dec 05 11:32:43 crc kubenswrapper[4728]: I1205 11:32:43.372060 4728 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dfa85807-524b-4fa7-9cf4-b05a8b659c71-config\") on node \"crc\" DevicePath \"\""
Dec 05 11:32:44 crc kubenswrapper[4728]: I1205 11:32:44.129916 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6b6c754dc9-czxc8"
Dec 05 11:32:44 crc kubenswrapper[4728]: I1205 11:32:44.178818 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6b6c754dc9-czxc8"]
Dec 05 11:32:44 crc kubenswrapper[4728]: I1205 11:32:44.186915 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6b6c754dc9-czxc8"]
Dec 05 11:32:44 crc kubenswrapper[4728]: I1205 11:32:44.367301 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dfa85807-524b-4fa7-9cf4-b05a8b659c71" path="/var/lib/kubelet/pods/dfa85807-524b-4fa7-9cf4-b05a8b659c71/volumes"
Dec 05 11:32:46 crc kubenswrapper[4728]: I1205 11:32:46.174443 4728 generic.go:334] "Generic (PLEG): container finished" podID="a0b71e7d-96b0-4ce9-bbb4-bc168495b082" containerID="2662f7d0df1631a589d833aac1575bcf135fc313919f4dcb1edde8a55a803594" exitCode=0
Dec 05 11:32:46 crc kubenswrapper[4728]: I1205 11:32:46.175356 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-xwznt" event={"ID":"a0b71e7d-96b0-4ce9-bbb4-bc168495b082","Type":"ContainerDied","Data":"2662f7d0df1631a589d833aac1575bcf135fc313919f4dcb1edde8a55a803594"}
Dec 05 11:32:46 crc kubenswrapper[4728]: I1205 11:32:46.195881 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a7d98205-ffa7-4388-8fff-66caf169466f","Type":"ContainerStarted","Data":"3ac25d83fbc0898689c7c46d4e7e9fb275a59327a0d7163efc95bb56adc48bed"}
Dec 05 11:32:46 crc kubenswrapper[4728]: I1205 11:32:46.196252 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Dec 05 11:32:46 crc kubenswrapper[4728]: I1205 11:32:46.234942 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.8721995919999999 podStartE2EDuration="8.234921693s" podCreationTimestamp="2025-12-05 11:32:38 +0000 UTC" firstStartedPulling="2025-12-05 11:32:39.00221249 +0000 UTC m=+1493.144335183" lastFinishedPulling="2025-12-05 11:32:45.364934571 +0000 UTC m=+1499.507057284" observedRunningTime="2025-12-05 11:32:46.221235516 +0000 UTC m=+1500.363358219" watchObservedRunningTime="2025-12-05 11:32:46.234921693 +0000 UTC m=+1500.377044396"
Dec 05 11:32:47 crc kubenswrapper[4728]: I1205 11:32:47.614768 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-xwznt"
Dec 05 11:32:47 crc kubenswrapper[4728]: I1205 11:32:47.769577 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a0b71e7d-96b0-4ce9-bbb4-bc168495b082-scripts\") pod \"a0b71e7d-96b0-4ce9-bbb4-bc168495b082\" (UID: \"a0b71e7d-96b0-4ce9-bbb4-bc168495b082\") "
Dec 05 11:32:47 crc kubenswrapper[4728]: I1205 11:32:47.769633 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c7c5z\" (UniqueName: \"kubernetes.io/projected/a0b71e7d-96b0-4ce9-bbb4-bc168495b082-kube-api-access-c7c5z\") pod \"a0b71e7d-96b0-4ce9-bbb4-bc168495b082\" (UID: \"a0b71e7d-96b0-4ce9-bbb4-bc168495b082\") "
Dec 05 11:32:47 crc kubenswrapper[4728]: I1205 11:32:47.769764 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0b71e7d-96b0-4ce9-bbb4-bc168495b082-combined-ca-bundle\") pod \"a0b71e7d-96b0-4ce9-bbb4-bc168495b082\" (UID: \"a0b71e7d-96b0-4ce9-bbb4-bc168495b082\") "
Dec 05 11:32:47 crc kubenswrapper[4728]: I1205 11:32:47.769830 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a0b71e7d-96b0-4ce9-bbb4-bc168495b082-config-data\") pod \"a0b71e7d-96b0-4ce9-bbb4-bc168495b082\" (UID: \"a0b71e7d-96b0-4ce9-bbb4-bc168495b082\") "
Dec 05 11:32:47 crc kubenswrapper[4728]: I1205 11:32:47.776400 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0b71e7d-96b0-4ce9-bbb4-bc168495b082-kube-api-access-c7c5z" (OuterVolumeSpecName: "kube-api-access-c7c5z") pod "a0b71e7d-96b0-4ce9-bbb4-bc168495b082" (UID: "a0b71e7d-96b0-4ce9-bbb4-bc168495b082"). InnerVolumeSpecName "kube-api-access-c7c5z". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:32:47 crc kubenswrapper[4728]: I1205 11:32:47.776826 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0b71e7d-96b0-4ce9-bbb4-bc168495b082-scripts" (OuterVolumeSpecName: "scripts") pod "a0b71e7d-96b0-4ce9-bbb4-bc168495b082" (UID: "a0b71e7d-96b0-4ce9-bbb4-bc168495b082"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:32:47 crc kubenswrapper[4728]: I1205 11:32:47.807120 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0b71e7d-96b0-4ce9-bbb4-bc168495b082-config-data" (OuterVolumeSpecName: "config-data") pod "a0b71e7d-96b0-4ce9-bbb4-bc168495b082" (UID: "a0b71e7d-96b0-4ce9-bbb4-bc168495b082"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:32:47 crc kubenswrapper[4728]: I1205 11:32:47.808332 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0b71e7d-96b0-4ce9-bbb4-bc168495b082-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a0b71e7d-96b0-4ce9-bbb4-bc168495b082" (UID: "a0b71e7d-96b0-4ce9-bbb4-bc168495b082"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:32:47 crc kubenswrapper[4728]: I1205 11:32:47.873218 4728 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0b71e7d-96b0-4ce9-bbb4-bc168495b082-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 11:32:47 crc kubenswrapper[4728]: I1205 11:32:47.873261 4728 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a0b71e7d-96b0-4ce9-bbb4-bc168495b082-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 11:32:47 crc kubenswrapper[4728]: I1205 11:32:47.873276 4728 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a0b71e7d-96b0-4ce9-bbb4-bc168495b082-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 11:32:47 crc kubenswrapper[4728]: I1205 11:32:47.873295 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c7c5z\" (UniqueName: \"kubernetes.io/projected/a0b71e7d-96b0-4ce9-bbb4-bc168495b082-kube-api-access-c7c5z\") on node \"crc\" DevicePath \"\""
Dec 05 11:32:48 crc kubenswrapper[4728]: I1205 11:32:48.142620 4728 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-6b6c754dc9-czxc8" podUID="dfa85807-524b-4fa7-9cf4-b05a8b659c71" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.207:5353: i/o timeout"
Dec 05 11:32:48 crc kubenswrapper[4728]: I1205 11:32:48.230881 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-xwznt" event={"ID":"a0b71e7d-96b0-4ce9-bbb4-bc168495b082","Type":"ContainerDied","Data":"1d114962278e3ba99919b2f17ed714dda9d467718f72bfeb2aff50b83edb5c17"}
Dec 05 11:32:48 crc kubenswrapper[4728]: I1205 11:32:48.230922 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1d114962278e3ba99919b2f17ed714dda9d467718f72bfeb2aff50b83edb5c17"
Dec 05 11:32:48 crc kubenswrapper[4728]: I1205 11:32:48.231000 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-xwznt"
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-xwznt" Dec 05 11:32:48 crc kubenswrapper[4728]: I1205 11:32:48.411818 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 11:32:48 crc kubenswrapper[4728]: I1205 11:32:48.412496 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="485fec75-8b78-42b3-a6b8-dde110718b6c" containerName="nova-api-log" containerID="cri-o://c2a5cc41c30e97d9bb8c06db8bb72c71116740daa394fdafc6bf08f6cf4453e3" gracePeriod=30 Dec 05 11:32:48 crc kubenswrapper[4728]: I1205 11:32:48.412550 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="485fec75-8b78-42b3-a6b8-dde110718b6c" containerName="nova-api-api" containerID="cri-o://f649fafe101a00b5fda464c4333cdc54c78e32d9240804f2a9502ff5472340e2" gracePeriod=30 Dec 05 11:32:48 crc kubenswrapper[4728]: I1205 11:32:48.436153 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 11:32:48 crc kubenswrapper[4728]: I1205 11:32:48.436620 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="8ec1f4a3-260f-4d09-929c-1c625845b8fa" containerName="nova-scheduler-scheduler" containerID="cri-o://0f7a2586e480b180959bac03629689913722dfbce1480cb4e037f8a7c5044505" gracePeriod=30 Dec 05 11:32:48 crc kubenswrapper[4728]: I1205 11:32:48.451331 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 11:32:48 crc kubenswrapper[4728]: I1205 11:32:48.451566 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="2bc84149-614a-4651-8686-eef191b3d230" containerName="nova-metadata-log" containerID="cri-o://fc6f9217dda8df35da72d437bf3709010ced651b32f8bd424c6c3214e33ee9af" gracePeriod=30 Dec 05 11:32:48 crc kubenswrapper[4728]: I1205 11:32:48.451679 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="2bc84149-614a-4651-8686-eef191b3d230" containerName="nova-metadata-metadata" containerID="cri-o://6e8b620f0f9355b6539cdd24729553d33de80c9b836a57aa2bad7ca015bb9b0c" gracePeriod=30 Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.013288 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.200500 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/485fec75-8b78-42b3-a6b8-dde110718b6c-internal-tls-certs\") pod \"485fec75-8b78-42b3-a6b8-dde110718b6c\" (UID: \"485fec75-8b78-42b3-a6b8-dde110718b6c\") " Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.200856 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/485fec75-8b78-42b3-a6b8-dde110718b6c-combined-ca-bundle\") pod \"485fec75-8b78-42b3-a6b8-dde110718b6c\" (UID: \"485fec75-8b78-42b3-a6b8-dde110718b6c\") " Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.200923 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-thbt7\" (UniqueName: \"kubernetes.io/projected/485fec75-8b78-42b3-a6b8-dde110718b6c-kube-api-access-thbt7\") pod \"485fec75-8b78-42b3-a6b8-dde110718b6c\" (UID: \"485fec75-8b78-42b3-a6b8-dde110718b6c\") " Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.201071 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/485fec75-8b78-42b3-a6b8-dde110718b6c-public-tls-certs\") pod \"485fec75-8b78-42b3-a6b8-dde110718b6c\" (UID: \"485fec75-8b78-42b3-a6b8-dde110718b6c\") " Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.201119 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/485fec75-8b78-42b3-a6b8-dde110718b6c-logs\") pod \"485fec75-8b78-42b3-a6b8-dde110718b6c\" (UID: \"485fec75-8b78-42b3-a6b8-dde110718b6c\") " Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.201163 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/485fec75-8b78-42b3-a6b8-dde110718b6c-config-data\") pod \"485fec75-8b78-42b3-a6b8-dde110718b6c\" (UID: \"485fec75-8b78-42b3-a6b8-dde110718b6c\") " Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.202174 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/485fec75-8b78-42b3-a6b8-dde110718b6c-logs" (OuterVolumeSpecName: "logs") pod "485fec75-8b78-42b3-a6b8-dde110718b6c" (UID: "485fec75-8b78-42b3-a6b8-dde110718b6c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.207901 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/485fec75-8b78-42b3-a6b8-dde110718b6c-kube-api-access-thbt7" (OuterVolumeSpecName: "kube-api-access-thbt7") pod "485fec75-8b78-42b3-a6b8-dde110718b6c" (UID: "485fec75-8b78-42b3-a6b8-dde110718b6c"). InnerVolumeSpecName "kube-api-access-thbt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.233447 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/485fec75-8b78-42b3-a6b8-dde110718b6c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "485fec75-8b78-42b3-a6b8-dde110718b6c" (UID: "485fec75-8b78-42b3-a6b8-dde110718b6c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.249976 4728 generic.go:334] "Generic (PLEG): container finished" podID="485fec75-8b78-42b3-a6b8-dde110718b6c" containerID="f649fafe101a00b5fda464c4333cdc54c78e32d9240804f2a9502ff5472340e2" exitCode=0 Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.250016 4728 generic.go:334] "Generic (PLEG): container finished" podID="485fec75-8b78-42b3-a6b8-dde110718b6c" containerID="c2a5cc41c30e97d9bb8c06db8bb72c71116740daa394fdafc6bf08f6cf4453e3" exitCode=143 Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.250021 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.250067 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"485fec75-8b78-42b3-a6b8-dde110718b6c","Type":"ContainerDied","Data":"f649fafe101a00b5fda464c4333cdc54c78e32d9240804f2a9502ff5472340e2"} Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.250127 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"485fec75-8b78-42b3-a6b8-dde110718b6c","Type":"ContainerDied","Data":"c2a5cc41c30e97d9bb8c06db8bb72c71116740daa394fdafc6bf08f6cf4453e3"} Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.250140 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"485fec75-8b78-42b3-a6b8-dde110718b6c","Type":"ContainerDied","Data":"80f3a398da2230c662fbe3d12216f5f61887a2659a68e3e0ac7ffa017ee014a4"} Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.250159 4728 scope.go:117] "RemoveContainer" containerID="f649fafe101a00b5fda464c4333cdc54c78e32d9240804f2a9502ff5472340e2" Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.256125 4728 generic.go:334] "Generic (PLEG): container finished" podID="2bc84149-614a-4651-8686-eef191b3d230" containerID="fc6f9217dda8df35da72d437bf3709010ced651b32f8bd424c6c3214e33ee9af" exitCode=143 Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.256180 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"2bc84149-614a-4651-8686-eef191b3d230","Type":"ContainerDied","Data":"fc6f9217dda8df35da72d437bf3709010ced651b32f8bd424c6c3214e33ee9af"} Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.260092 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/485fec75-8b78-42b3-a6b8-dde110718b6c-config-data" (OuterVolumeSpecName: "config-data") pod "485fec75-8b78-42b3-a6b8-dde110718b6c" (UID: "485fec75-8b78-42b3-a6b8-dde110718b6c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.274754 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/485fec75-8b78-42b3-a6b8-dde110718b6c-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "485fec75-8b78-42b3-a6b8-dde110718b6c" (UID: "485fec75-8b78-42b3-a6b8-dde110718b6c"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.279761 4728 scope.go:117] "RemoveContainer" containerID="c2a5cc41c30e97d9bb8c06db8bb72c71116740daa394fdafc6bf08f6cf4453e3" Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.291857 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/485fec75-8b78-42b3-a6b8-dde110718b6c-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "485fec75-8b78-42b3-a6b8-dde110718b6c" (UID: "485fec75-8b78-42b3-a6b8-dde110718b6c"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.297473 4728 scope.go:117] "RemoveContainer" containerID="f649fafe101a00b5fda464c4333cdc54c78e32d9240804f2a9502ff5472340e2" Dec 05 11:32:49 crc kubenswrapper[4728]: E1205 11:32:49.297992 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f649fafe101a00b5fda464c4333cdc54c78e32d9240804f2a9502ff5472340e2\": container with ID starting with f649fafe101a00b5fda464c4333cdc54c78e32d9240804f2a9502ff5472340e2 not found: ID does not exist" containerID="f649fafe101a00b5fda464c4333cdc54c78e32d9240804f2a9502ff5472340e2" Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.298018 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f649fafe101a00b5fda464c4333cdc54c78e32d9240804f2a9502ff5472340e2"} err="failed to get container status \"f649fafe101a00b5fda464c4333cdc54c78e32d9240804f2a9502ff5472340e2\": rpc error: code = NotFound desc = could not find container \"f649fafe101a00b5fda464c4333cdc54c78e32d9240804f2a9502ff5472340e2\": container with ID starting with f649fafe101a00b5fda464c4333cdc54c78e32d9240804f2a9502ff5472340e2 not found: ID does not exist" Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.298040 4728 scope.go:117] "RemoveContainer" containerID="c2a5cc41c30e97d9bb8c06db8bb72c71116740daa394fdafc6bf08f6cf4453e3" Dec 05 11:32:49 crc kubenswrapper[4728]: E1205 11:32:49.298437 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c2a5cc41c30e97d9bb8c06db8bb72c71116740daa394fdafc6bf08f6cf4453e3\": container with ID starting with c2a5cc41c30e97d9bb8c06db8bb72c71116740daa394fdafc6bf08f6cf4453e3 not found: ID does not exist" containerID="c2a5cc41c30e97d9bb8c06db8bb72c71116740daa394fdafc6bf08f6cf4453e3" Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.298489 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c2a5cc41c30e97d9bb8c06db8bb72c71116740daa394fdafc6bf08f6cf4453e3"} err="failed to get container status \"c2a5cc41c30e97d9bb8c06db8bb72c71116740daa394fdafc6bf08f6cf4453e3\": rpc error: code = NotFound desc = could not find container \"c2a5cc41c30e97d9bb8c06db8bb72c71116740daa394fdafc6bf08f6cf4453e3\": container with ID starting with c2a5cc41c30e97d9bb8c06db8bb72c71116740daa394fdafc6bf08f6cf4453e3 not found: ID does not exist" Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.298524 4728 scope.go:117] "RemoveContainer" containerID="f649fafe101a00b5fda464c4333cdc54c78e32d9240804f2a9502ff5472340e2" Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.298933 4728 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"f649fafe101a00b5fda464c4333cdc54c78e32d9240804f2a9502ff5472340e2"} err="failed to get container status \"f649fafe101a00b5fda464c4333cdc54c78e32d9240804f2a9502ff5472340e2\": rpc error: code = NotFound desc = could not find container \"f649fafe101a00b5fda464c4333cdc54c78e32d9240804f2a9502ff5472340e2\": container with ID starting with f649fafe101a00b5fda464c4333cdc54c78e32d9240804f2a9502ff5472340e2 not found: ID does not exist" Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.298963 4728 scope.go:117] "RemoveContainer" containerID="c2a5cc41c30e97d9bb8c06db8bb72c71116740daa394fdafc6bf08f6cf4453e3" Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.299300 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c2a5cc41c30e97d9bb8c06db8bb72c71116740daa394fdafc6bf08f6cf4453e3"} err="failed to get container status \"c2a5cc41c30e97d9bb8c06db8bb72c71116740daa394fdafc6bf08f6cf4453e3\": rpc error: code = NotFound desc = could not find container \"c2a5cc41c30e97d9bb8c06db8bb72c71116740daa394fdafc6bf08f6cf4453e3\": container with ID starting with c2a5cc41c30e97d9bb8c06db8bb72c71116740daa394fdafc6bf08f6cf4453e3 not found: ID does not exist" Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.303651 4728 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/485fec75-8b78-42b3-a6b8-dde110718b6c-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.303677 4728 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/485fec75-8b78-42b3-a6b8-dde110718b6c-logs\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.303687 4728 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/485fec75-8b78-42b3-a6b8-dde110718b6c-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.303695 4728 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/485fec75-8b78-42b3-a6b8-dde110718b6c-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.303704 4728 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/485fec75-8b78-42b3-a6b8-dde110718b6c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.303713 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-thbt7\" (UniqueName: \"kubernetes.io/projected/485fec75-8b78-42b3-a6b8-dde110718b6c-kube-api-access-thbt7\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.634809 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.646453 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.662716 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 05 11:32:49 crc kubenswrapper[4728]: E1205 11:32:49.663128 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfa85807-524b-4fa7-9cf4-b05a8b659c71" containerName="dnsmasq-dns" Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.663149 4728 
state_mem.go:107] "Deleted CPUSet assignment" podUID="dfa85807-524b-4fa7-9cf4-b05a8b659c71" containerName="dnsmasq-dns" Dec 05 11:32:49 crc kubenswrapper[4728]: E1205 11:32:49.663165 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0b71e7d-96b0-4ce9-bbb4-bc168495b082" containerName="nova-manage" Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.663171 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0b71e7d-96b0-4ce9-bbb4-bc168495b082" containerName="nova-manage" Dec 05 11:32:49 crc kubenswrapper[4728]: E1205 11:32:49.663187 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfa85807-524b-4fa7-9cf4-b05a8b659c71" containerName="init" Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.663193 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfa85807-524b-4fa7-9cf4-b05a8b659c71" containerName="init" Dec 05 11:32:49 crc kubenswrapper[4728]: E1205 11:32:49.663217 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="485fec75-8b78-42b3-a6b8-dde110718b6c" containerName="nova-api-log" Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.663222 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="485fec75-8b78-42b3-a6b8-dde110718b6c" containerName="nova-api-log" Dec 05 11:32:49 crc kubenswrapper[4728]: E1205 11:32:49.663243 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="485fec75-8b78-42b3-a6b8-dde110718b6c" containerName="nova-api-api" Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.663249 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="485fec75-8b78-42b3-a6b8-dde110718b6c" containerName="nova-api-api" Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.663404 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="485fec75-8b78-42b3-a6b8-dde110718b6c" containerName="nova-api-log" Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.663425 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="dfa85807-524b-4fa7-9cf4-b05a8b659c71" containerName="dnsmasq-dns" Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.663434 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0b71e7d-96b0-4ce9-bbb4-bc168495b082" containerName="nova-manage" Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.663445 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="485fec75-8b78-42b3-a6b8-dde110718b6c" containerName="nova-api-api" Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.664413 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.666278 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.667002 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.669418 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.678811 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.814431 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7183619-beae-465b-86cf-ccbb710d4ac8-config-data\") pod \"nova-api-0\" (UID: \"e7183619-beae-465b-86cf-ccbb710d4ac8\") " pod="openstack/nova-api-0" Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.814557 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-67b6j\" (UniqueName: \"kubernetes.io/projected/e7183619-beae-465b-86cf-ccbb710d4ac8-kube-api-access-67b6j\") pod \"nova-api-0\" (UID: \"e7183619-beae-465b-86cf-ccbb710d4ac8\") " pod="openstack/nova-api-0" Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.814631 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e7183619-beae-465b-86cf-ccbb710d4ac8-logs\") pod \"nova-api-0\" (UID: \"e7183619-beae-465b-86cf-ccbb710d4ac8\") " pod="openstack/nova-api-0" Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.814669 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e7183619-beae-465b-86cf-ccbb710d4ac8-public-tls-certs\") pod \"nova-api-0\" (UID: \"e7183619-beae-465b-86cf-ccbb710d4ac8\") " pod="openstack/nova-api-0" Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.814708 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7183619-beae-465b-86cf-ccbb710d4ac8-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"e7183619-beae-465b-86cf-ccbb710d4ac8\") " pod="openstack/nova-api-0" Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.814753 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e7183619-beae-465b-86cf-ccbb710d4ac8-internal-tls-certs\") pod \"nova-api-0\" (UID: \"e7183619-beae-465b-86cf-ccbb710d4ac8\") " pod="openstack/nova-api-0" Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.916176 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7183619-beae-465b-86cf-ccbb710d4ac8-config-data\") pod \"nova-api-0\" (UID: \"e7183619-beae-465b-86cf-ccbb710d4ac8\") " pod="openstack/nova-api-0" Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.916575 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-67b6j\" (UniqueName: \"kubernetes.io/projected/e7183619-beae-465b-86cf-ccbb710d4ac8-kube-api-access-67b6j\") pod 
\"nova-api-0\" (UID: \"e7183619-beae-465b-86cf-ccbb710d4ac8\") " pod="openstack/nova-api-0" Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.916667 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e7183619-beae-465b-86cf-ccbb710d4ac8-logs\") pod \"nova-api-0\" (UID: \"e7183619-beae-465b-86cf-ccbb710d4ac8\") " pod="openstack/nova-api-0" Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.916714 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e7183619-beae-465b-86cf-ccbb710d4ac8-public-tls-certs\") pod \"nova-api-0\" (UID: \"e7183619-beae-465b-86cf-ccbb710d4ac8\") " pod="openstack/nova-api-0" Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.916761 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7183619-beae-465b-86cf-ccbb710d4ac8-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"e7183619-beae-465b-86cf-ccbb710d4ac8\") " pod="openstack/nova-api-0" Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.916837 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e7183619-beae-465b-86cf-ccbb710d4ac8-internal-tls-certs\") pod \"nova-api-0\" (UID: \"e7183619-beae-465b-86cf-ccbb710d4ac8\") " pod="openstack/nova-api-0" Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.917507 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e7183619-beae-465b-86cf-ccbb710d4ac8-logs\") pod \"nova-api-0\" (UID: \"e7183619-beae-465b-86cf-ccbb710d4ac8\") " pod="openstack/nova-api-0" Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.921056 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7183619-beae-465b-86cf-ccbb710d4ac8-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"e7183619-beae-465b-86cf-ccbb710d4ac8\") " pod="openstack/nova-api-0" Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.921284 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7183619-beae-465b-86cf-ccbb710d4ac8-config-data\") pod \"nova-api-0\" (UID: \"e7183619-beae-465b-86cf-ccbb710d4ac8\") " pod="openstack/nova-api-0" Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.925092 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e7183619-beae-465b-86cf-ccbb710d4ac8-public-tls-certs\") pod \"nova-api-0\" (UID: \"e7183619-beae-465b-86cf-ccbb710d4ac8\") " pod="openstack/nova-api-0" Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.929566 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e7183619-beae-465b-86cf-ccbb710d4ac8-internal-tls-certs\") pod \"nova-api-0\" (UID: \"e7183619-beae-465b-86cf-ccbb710d4ac8\") " pod="openstack/nova-api-0" Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.935980 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-67b6j\" (UniqueName: \"kubernetes.io/projected/e7183619-beae-465b-86cf-ccbb710d4ac8-kube-api-access-67b6j\") pod \"nova-api-0\" (UID: \"e7183619-beae-465b-86cf-ccbb710d4ac8\") " 
pod="openstack/nova-api-0" Dec 05 11:32:49 crc kubenswrapper[4728]: I1205 11:32:49.983536 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 11:32:50 crc kubenswrapper[4728]: I1205 11:32:50.267125 4728 generic.go:334] "Generic (PLEG): container finished" podID="8ec1f4a3-260f-4d09-929c-1c625845b8fa" containerID="0f7a2586e480b180959bac03629689913722dfbce1480cb4e037f8a7c5044505" exitCode=0 Dec 05 11:32:50 crc kubenswrapper[4728]: I1205 11:32:50.267212 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8ec1f4a3-260f-4d09-929c-1c625845b8fa","Type":"ContainerDied","Data":"0f7a2586e480b180959bac03629689913722dfbce1480cb4e037f8a7c5044505"} Dec 05 11:32:50 crc kubenswrapper[4728]: I1205 11:32:50.267524 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8ec1f4a3-260f-4d09-929c-1c625845b8fa","Type":"ContainerDied","Data":"b201dd0f9b43a912e1c74615332d251789eb067754be913cae16e067895dd919"} Dec 05 11:32:50 crc kubenswrapper[4728]: I1205 11:32:50.267541 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b201dd0f9b43a912e1c74615332d251789eb067754be913cae16e067895dd919" Dec 05 11:32:50 crc kubenswrapper[4728]: I1205 11:32:50.288890 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 11:32:50 crc kubenswrapper[4728]: I1205 11:32:50.364065 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="485fec75-8b78-42b3-a6b8-dde110718b6c" path="/var/lib/kubelet/pods/485fec75-8b78-42b3-a6b8-dde110718b6c/volumes" Dec 05 11:32:50 crc kubenswrapper[4728]: I1205 11:32:50.424275 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ec1f4a3-260f-4d09-929c-1c625845b8fa-combined-ca-bundle\") pod \"8ec1f4a3-260f-4d09-929c-1c625845b8fa\" (UID: \"8ec1f4a3-260f-4d09-929c-1c625845b8fa\") " Dec 05 11:32:50 crc kubenswrapper[4728]: I1205 11:32:50.424378 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ec1f4a3-260f-4d09-929c-1c625845b8fa-config-data\") pod \"8ec1f4a3-260f-4d09-929c-1c625845b8fa\" (UID: \"8ec1f4a3-260f-4d09-929c-1c625845b8fa\") " Dec 05 11:32:50 crc kubenswrapper[4728]: I1205 11:32:50.425056 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d2r7n\" (UniqueName: \"kubernetes.io/projected/8ec1f4a3-260f-4d09-929c-1c625845b8fa-kube-api-access-d2r7n\") pod \"8ec1f4a3-260f-4d09-929c-1c625845b8fa\" (UID: \"8ec1f4a3-260f-4d09-929c-1c625845b8fa\") " Dec 05 11:32:50 crc kubenswrapper[4728]: I1205 11:32:50.432014 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ec1f4a3-260f-4d09-929c-1c625845b8fa-kube-api-access-d2r7n" (OuterVolumeSpecName: "kube-api-access-d2r7n") pod "8ec1f4a3-260f-4d09-929c-1c625845b8fa" (UID: "8ec1f4a3-260f-4d09-929c-1c625845b8fa"). InnerVolumeSpecName "kube-api-access-d2r7n". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:32:50 crc kubenswrapper[4728]: I1205 11:32:50.457996 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 11:32:50 crc kubenswrapper[4728]: I1205 11:32:50.463445 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ec1f4a3-260f-4d09-929c-1c625845b8fa-config-data" (OuterVolumeSpecName: "config-data") pod "8ec1f4a3-260f-4d09-929c-1c625845b8fa" (UID: "8ec1f4a3-260f-4d09-929c-1c625845b8fa"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:32:50 crc kubenswrapper[4728]: I1205 11:32:50.465522 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ec1f4a3-260f-4d09-929c-1c625845b8fa-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8ec1f4a3-260f-4d09-929c-1c625845b8fa" (UID: "8ec1f4a3-260f-4d09-929c-1c625845b8fa"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:32:50 crc kubenswrapper[4728]: I1205 11:32:50.527528 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d2r7n\" (UniqueName: \"kubernetes.io/projected/8ec1f4a3-260f-4d09-929c-1c625845b8fa-kube-api-access-d2r7n\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:50 crc kubenswrapper[4728]: I1205 11:32:50.527915 4728 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ec1f4a3-260f-4d09-929c-1c625845b8fa-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:50 crc kubenswrapper[4728]: I1205 11:32:50.527926 4728 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ec1f4a3-260f-4d09-929c-1c625845b8fa-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:51 crc kubenswrapper[4728]: I1205 11:32:51.279339 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 11:32:51 crc kubenswrapper[4728]: I1205 11:32:51.279355 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e7183619-beae-465b-86cf-ccbb710d4ac8","Type":"ContainerStarted","Data":"b60d1adf95a8d49c6ee2923bee37f69acfeace69fcad506e16f9a4c3d718ac0f"} Dec 05 11:32:51 crc kubenswrapper[4728]: I1205 11:32:51.279656 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e7183619-beae-465b-86cf-ccbb710d4ac8","Type":"ContainerStarted","Data":"8f767e8b9d46f442ac229328504f00878cd2344e4953b3b82644a741a2c23ab8"} Dec 05 11:32:51 crc kubenswrapper[4728]: I1205 11:32:51.279677 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e7183619-beae-465b-86cf-ccbb710d4ac8","Type":"ContainerStarted","Data":"40bc5b770699bc55dba6134e2aff5ee25647f6d8eb937b0ef3ca2c4af204fb81"} Dec 05 11:32:51 crc kubenswrapper[4728]: I1205 11:32:51.320611 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.320593158 podStartE2EDuration="2.320593158s" podCreationTimestamp="2025-12-05 11:32:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:32:51.302938841 +0000 UTC m=+1505.445061544" watchObservedRunningTime="2025-12-05 11:32:51.320593158 +0000 UTC m=+1505.462715851" Dec 05 11:32:51 crc kubenswrapper[4728]: I1205 11:32:51.333643 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 11:32:51 crc kubenswrapper[4728]: I1205 11:32:51.360488 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 11:32:51 crc kubenswrapper[4728]: I1205 11:32:51.406971 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 11:32:51 crc kubenswrapper[4728]: E1205 11:32:51.407468 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ec1f4a3-260f-4d09-929c-1c625845b8fa" containerName="nova-scheduler-scheduler" Dec 05 11:32:51 crc kubenswrapper[4728]: I1205 11:32:51.407491 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ec1f4a3-260f-4d09-929c-1c625845b8fa" containerName="nova-scheduler-scheduler" Dec 05 11:32:51 crc kubenswrapper[4728]: I1205 11:32:51.407699 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ec1f4a3-260f-4d09-929c-1c625845b8fa" containerName="nova-scheduler-scheduler" Dec 05 11:32:51 crc kubenswrapper[4728]: I1205 11:32:51.408416 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 11:32:51 crc kubenswrapper[4728]: I1205 11:32:51.410265 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 05 11:32:51 crc kubenswrapper[4728]: I1205 11:32:51.423582 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 11:32:51 crc kubenswrapper[4728]: I1205 11:32:51.445657 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa4cedf1-fd8d-4339-8569-f105adb2ca1a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"aa4cedf1-fd8d-4339-8569-f105adb2ca1a\") " pod="openstack/nova-scheduler-0" Dec 05 11:32:51 crc kubenswrapper[4728]: I1205 11:32:51.446425 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-df94g\" (UniqueName: \"kubernetes.io/projected/aa4cedf1-fd8d-4339-8569-f105adb2ca1a-kube-api-access-df94g\") pod \"nova-scheduler-0\" (UID: \"aa4cedf1-fd8d-4339-8569-f105adb2ca1a\") " pod="openstack/nova-scheduler-0" Dec 05 11:32:51 crc kubenswrapper[4728]: I1205 11:32:51.446501 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa4cedf1-fd8d-4339-8569-f105adb2ca1a-config-data\") pod \"nova-scheduler-0\" (UID: \"aa4cedf1-fd8d-4339-8569-f105adb2ca1a\") " pod="openstack/nova-scheduler-0" Dec 05 11:32:51 crc kubenswrapper[4728]: I1205 11:32:51.548304 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-df94g\" (UniqueName: \"kubernetes.io/projected/aa4cedf1-fd8d-4339-8569-f105adb2ca1a-kube-api-access-df94g\") pod \"nova-scheduler-0\" (UID: \"aa4cedf1-fd8d-4339-8569-f105adb2ca1a\") " pod="openstack/nova-scheduler-0" Dec 05 11:32:51 crc kubenswrapper[4728]: I1205 11:32:51.548367 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa4cedf1-fd8d-4339-8569-f105adb2ca1a-config-data\") pod \"nova-scheduler-0\" (UID: \"aa4cedf1-fd8d-4339-8569-f105adb2ca1a\") " pod="openstack/nova-scheduler-0" Dec 05 11:32:51 crc kubenswrapper[4728]: I1205 11:32:51.548554 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa4cedf1-fd8d-4339-8569-f105adb2ca1a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"aa4cedf1-fd8d-4339-8569-f105adb2ca1a\") " pod="openstack/nova-scheduler-0" Dec 05 11:32:51 crc kubenswrapper[4728]: I1205 11:32:51.558603 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa4cedf1-fd8d-4339-8569-f105adb2ca1a-config-data\") pod \"nova-scheduler-0\" (UID: \"aa4cedf1-fd8d-4339-8569-f105adb2ca1a\") " pod="openstack/nova-scheduler-0" Dec 05 11:32:51 crc kubenswrapper[4728]: I1205 11:32:51.558677 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa4cedf1-fd8d-4339-8569-f105adb2ca1a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"aa4cedf1-fd8d-4339-8569-f105adb2ca1a\") " pod="openstack/nova-scheduler-0" Dec 05 11:32:51 crc kubenswrapper[4728]: I1205 11:32:51.571020 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-df94g\" (UniqueName: 
\"kubernetes.io/projected/aa4cedf1-fd8d-4339-8569-f105adb2ca1a-kube-api-access-df94g\") pod \"nova-scheduler-0\" (UID: \"aa4cedf1-fd8d-4339-8569-f105adb2ca1a\") " pod="openstack/nova-scheduler-0" Dec 05 11:32:51 crc kubenswrapper[4728]: I1205 11:32:51.604549 4728 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="2bc84149-614a-4651-8686-eef191b3d230" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.211:8775/\": read tcp 10.217.0.2:44246->10.217.0.211:8775: read: connection reset by peer" Dec 05 11:32:51 crc kubenswrapper[4728]: I1205 11:32:51.604590 4728 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="2bc84149-614a-4651-8686-eef191b3d230" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.211:8775/\": read tcp 10.217.0.2:44262->10.217.0.211:8775: read: connection reset by peer" Dec 05 11:32:51 crc kubenswrapper[4728]: I1205 11:32:51.727404 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 11:32:52 crc kubenswrapper[4728]: I1205 11:32:52.000259 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 11:32:52 crc kubenswrapper[4728]: I1205 11:32:52.159925 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/2bc84149-614a-4651-8686-eef191b3d230-nova-metadata-tls-certs\") pod \"2bc84149-614a-4651-8686-eef191b3d230\" (UID: \"2bc84149-614a-4651-8686-eef191b3d230\") " Dec 05 11:32:52 crc kubenswrapper[4728]: I1205 11:32:52.160944 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hhrkv\" (UniqueName: \"kubernetes.io/projected/2bc84149-614a-4651-8686-eef191b3d230-kube-api-access-hhrkv\") pod \"2bc84149-614a-4651-8686-eef191b3d230\" (UID: \"2bc84149-614a-4651-8686-eef191b3d230\") " Dec 05 11:32:52 crc kubenswrapper[4728]: I1205 11:32:52.161013 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2bc84149-614a-4651-8686-eef191b3d230-combined-ca-bundle\") pod \"2bc84149-614a-4651-8686-eef191b3d230\" (UID: \"2bc84149-614a-4651-8686-eef191b3d230\") " Dec 05 11:32:52 crc kubenswrapper[4728]: I1205 11:32:52.161054 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2bc84149-614a-4651-8686-eef191b3d230-logs\") pod \"2bc84149-614a-4651-8686-eef191b3d230\" (UID: \"2bc84149-614a-4651-8686-eef191b3d230\") " Dec 05 11:32:52 crc kubenswrapper[4728]: I1205 11:32:52.161091 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2bc84149-614a-4651-8686-eef191b3d230-config-data\") pod \"2bc84149-614a-4651-8686-eef191b3d230\" (UID: \"2bc84149-614a-4651-8686-eef191b3d230\") " Dec 05 11:32:52 crc kubenswrapper[4728]: I1205 11:32:52.163436 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2bc84149-614a-4651-8686-eef191b3d230-logs" (OuterVolumeSpecName: "logs") pod "2bc84149-614a-4651-8686-eef191b3d230" (UID: "2bc84149-614a-4651-8686-eef191b3d230"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:32:52 crc kubenswrapper[4728]: I1205 11:32:52.165689 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2bc84149-614a-4651-8686-eef191b3d230-kube-api-access-hhrkv" (OuterVolumeSpecName: "kube-api-access-hhrkv") pod "2bc84149-614a-4651-8686-eef191b3d230" (UID: "2bc84149-614a-4651-8686-eef191b3d230"). InnerVolumeSpecName "kube-api-access-hhrkv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:32:52 crc kubenswrapper[4728]: I1205 11:32:52.189198 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2bc84149-614a-4651-8686-eef191b3d230-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2bc84149-614a-4651-8686-eef191b3d230" (UID: "2bc84149-614a-4651-8686-eef191b3d230"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:32:52 crc kubenswrapper[4728]: I1205 11:32:52.200089 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2bc84149-614a-4651-8686-eef191b3d230-config-data" (OuterVolumeSpecName: "config-data") pod "2bc84149-614a-4651-8686-eef191b3d230" (UID: "2bc84149-614a-4651-8686-eef191b3d230"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:32:52 crc kubenswrapper[4728]: I1205 11:32:52.202637 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 11:32:52 crc kubenswrapper[4728]: W1205 11:32:52.205618 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaa4cedf1_fd8d_4339_8569_f105adb2ca1a.slice/crio-20814fa68ba672d72358693f9a4905807f81bd1e81917cd64192f53c55ba5025 WatchSource:0}: Error finding container 20814fa68ba672d72358693f9a4905807f81bd1e81917cd64192f53c55ba5025: Status 404 returned error can't find the container with id 20814fa68ba672d72358693f9a4905807f81bd1e81917cd64192f53c55ba5025 Dec 05 11:32:52 crc kubenswrapper[4728]: I1205 11:32:52.232127 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2bc84149-614a-4651-8686-eef191b3d230-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "2bc84149-614a-4651-8686-eef191b3d230" (UID: "2bc84149-614a-4651-8686-eef191b3d230"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:32:52 crc kubenswrapper[4728]: I1205 11:32:52.263951 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hhrkv\" (UniqueName: \"kubernetes.io/projected/2bc84149-614a-4651-8686-eef191b3d230-kube-api-access-hhrkv\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:52 crc kubenswrapper[4728]: I1205 11:32:52.264001 4728 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2bc84149-614a-4651-8686-eef191b3d230-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:52 crc kubenswrapper[4728]: I1205 11:32:52.264014 4728 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2bc84149-614a-4651-8686-eef191b3d230-logs\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:52 crc kubenswrapper[4728]: I1205 11:32:52.264025 4728 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2bc84149-614a-4651-8686-eef191b3d230-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:52 crc kubenswrapper[4728]: I1205 11:32:52.264036 4728 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/2bc84149-614a-4651-8686-eef191b3d230-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 11:32:52 crc kubenswrapper[4728]: I1205 11:32:52.292762 4728 generic.go:334] "Generic (PLEG): container finished" podID="2bc84149-614a-4651-8686-eef191b3d230" containerID="6e8b620f0f9355b6539cdd24729553d33de80c9b836a57aa2bad7ca015bb9b0c" exitCode=0 Dec 05 11:32:52 crc kubenswrapper[4728]: I1205 11:32:52.292831 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 11:32:52 crc kubenswrapper[4728]: I1205 11:32:52.292849 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"2bc84149-614a-4651-8686-eef191b3d230","Type":"ContainerDied","Data":"6e8b620f0f9355b6539cdd24729553d33de80c9b836a57aa2bad7ca015bb9b0c"} Dec 05 11:32:52 crc kubenswrapper[4728]: I1205 11:32:52.292920 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"2bc84149-614a-4651-8686-eef191b3d230","Type":"ContainerDied","Data":"16d265493882a124401d1787895f97609d0dd76722e0b36f1d9731d763de2a84"} Dec 05 11:32:52 crc kubenswrapper[4728]: I1205 11:32:52.292943 4728 scope.go:117] "RemoveContainer" containerID="6e8b620f0f9355b6539cdd24729553d33de80c9b836a57aa2bad7ca015bb9b0c" Dec 05 11:32:52 crc kubenswrapper[4728]: I1205 11:32:52.295112 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"aa4cedf1-fd8d-4339-8569-f105adb2ca1a","Type":"ContainerStarted","Data":"20814fa68ba672d72358693f9a4905807f81bd1e81917cd64192f53c55ba5025"} Dec 05 11:32:52 crc kubenswrapper[4728]: I1205 11:32:52.313047 4728 scope.go:117] "RemoveContainer" containerID="fc6f9217dda8df35da72d437bf3709010ced651b32f8bd424c6c3214e33ee9af" Dec 05 11:32:52 crc kubenswrapper[4728]: I1205 11:32:52.344830 4728 scope.go:117] "RemoveContainer" containerID="6e8b620f0f9355b6539cdd24729553d33de80c9b836a57aa2bad7ca015bb9b0c" Dec 05 11:32:52 crc kubenswrapper[4728]: I1205 11:32:52.345042 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 11:32:52 crc kubenswrapper[4728]: E1205 11:32:52.345281 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc 
error: code = NotFound desc = could not find container \"6e8b620f0f9355b6539cdd24729553d33de80c9b836a57aa2bad7ca015bb9b0c\": container with ID starting with 6e8b620f0f9355b6539cdd24729553d33de80c9b836a57aa2bad7ca015bb9b0c not found: ID does not exist" containerID="6e8b620f0f9355b6539cdd24729553d33de80c9b836a57aa2bad7ca015bb9b0c" Dec 05 11:32:52 crc kubenswrapper[4728]: I1205 11:32:52.345340 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e8b620f0f9355b6539cdd24729553d33de80c9b836a57aa2bad7ca015bb9b0c"} err="failed to get container status \"6e8b620f0f9355b6539cdd24729553d33de80c9b836a57aa2bad7ca015bb9b0c\": rpc error: code = NotFound desc = could not find container \"6e8b620f0f9355b6539cdd24729553d33de80c9b836a57aa2bad7ca015bb9b0c\": container with ID starting with 6e8b620f0f9355b6539cdd24729553d33de80c9b836a57aa2bad7ca015bb9b0c not found: ID does not exist" Dec 05 11:32:52 crc kubenswrapper[4728]: I1205 11:32:52.345379 4728 scope.go:117] "RemoveContainer" containerID="fc6f9217dda8df35da72d437bf3709010ced651b32f8bd424c6c3214e33ee9af" Dec 05 11:32:52 crc kubenswrapper[4728]: E1205 11:32:52.345899 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fc6f9217dda8df35da72d437bf3709010ced651b32f8bd424c6c3214e33ee9af\": container with ID starting with fc6f9217dda8df35da72d437bf3709010ced651b32f8bd424c6c3214e33ee9af not found: ID does not exist" containerID="fc6f9217dda8df35da72d437bf3709010ced651b32f8bd424c6c3214e33ee9af" Dec 05 11:32:52 crc kubenswrapper[4728]: I1205 11:32:52.345930 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fc6f9217dda8df35da72d437bf3709010ced651b32f8bd424c6c3214e33ee9af"} err="failed to get container status \"fc6f9217dda8df35da72d437bf3709010ced651b32f8bd424c6c3214e33ee9af\": rpc error: code = NotFound desc = could not find container \"fc6f9217dda8df35da72d437bf3709010ced651b32f8bd424c6c3214e33ee9af\": container with ID starting with fc6f9217dda8df35da72d437bf3709010ced651b32f8bd424c6c3214e33ee9af not found: ID does not exist" Dec 05 11:32:52 crc kubenswrapper[4728]: I1205 11:32:52.366225 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ec1f4a3-260f-4d09-929c-1c625845b8fa" path="/var/lib/kubelet/pods/8ec1f4a3-260f-4d09-929c-1c625845b8fa/volumes" Dec 05 11:32:52 crc kubenswrapper[4728]: I1205 11:32:52.377325 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 11:32:52 crc kubenswrapper[4728]: I1205 11:32:52.395961 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 05 11:32:52 crc kubenswrapper[4728]: E1205 11:32:52.396684 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2bc84149-614a-4651-8686-eef191b3d230" containerName="nova-metadata-log" Dec 05 11:32:52 crc kubenswrapper[4728]: I1205 11:32:52.396697 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="2bc84149-614a-4651-8686-eef191b3d230" containerName="nova-metadata-log" Dec 05 11:32:52 crc kubenswrapper[4728]: E1205 11:32:52.396722 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2bc84149-614a-4651-8686-eef191b3d230" containerName="nova-metadata-metadata" Dec 05 11:32:52 crc kubenswrapper[4728]: I1205 11:32:52.396728 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="2bc84149-614a-4651-8686-eef191b3d230" containerName="nova-metadata-metadata" Dec 05 11:32:52 crc 
kubenswrapper[4728]: I1205 11:32:52.396952 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="2bc84149-614a-4651-8686-eef191b3d230" containerName="nova-metadata-metadata"
Dec 05 11:32:52 crc kubenswrapper[4728]: I1205 11:32:52.396968 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="2bc84149-614a-4651-8686-eef191b3d230" containerName="nova-metadata-log"
Dec 05 11:32:52 crc kubenswrapper[4728]: I1205 11:32:52.398062 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 05 11:32:52 crc kubenswrapper[4728]: I1205 11:32:52.401780 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Dec 05 11:32:52 crc kubenswrapper[4728]: I1205 11:32:52.401845 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc"
Dec 05 11:32:52 crc kubenswrapper[4728]: I1205 11:32:52.407938 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Dec 05 11:32:52 crc kubenswrapper[4728]: I1205 11:32:52.467763 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/7a68b1f8-9521-44c8-8a8e-5bc26bc28047-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"7a68b1f8-9521-44c8-8a8e-5bc26bc28047\") " pod="openstack/nova-metadata-0"
Dec 05 11:32:52 crc kubenswrapper[4728]: I1205 11:32:52.467832 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a68b1f8-9521-44c8-8a8e-5bc26bc28047-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"7a68b1f8-9521-44c8-8a8e-5bc26bc28047\") " pod="openstack/nova-metadata-0"
Dec 05 11:32:52 crc kubenswrapper[4728]: I1205 11:32:52.467983 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a68b1f8-9521-44c8-8a8e-5bc26bc28047-config-data\") pod \"nova-metadata-0\" (UID: \"7a68b1f8-9521-44c8-8a8e-5bc26bc28047\") " pod="openstack/nova-metadata-0"
Dec 05 11:32:52 crc kubenswrapper[4728]: I1205 11:32:52.468010 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7a68b1f8-9521-44c8-8a8e-5bc26bc28047-logs\") pod \"nova-metadata-0\" (UID: \"7a68b1f8-9521-44c8-8a8e-5bc26bc28047\") " pod="openstack/nova-metadata-0"
Dec 05 11:32:52 crc kubenswrapper[4728]: I1205 11:32:52.468031 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lzsf2\" (UniqueName: \"kubernetes.io/projected/7a68b1f8-9521-44c8-8a8e-5bc26bc28047-kube-api-access-lzsf2\") pod \"nova-metadata-0\" (UID: \"7a68b1f8-9521-44c8-8a8e-5bc26bc28047\") " pod="openstack/nova-metadata-0"
Dec 05 11:32:52 crc kubenswrapper[4728]: I1205 11:32:52.569937 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a68b1f8-9521-44c8-8a8e-5bc26bc28047-config-data\") pod \"nova-metadata-0\" (UID: \"7a68b1f8-9521-44c8-8a8e-5bc26bc28047\") " pod="openstack/nova-metadata-0"
Dec 05 11:32:52 crc kubenswrapper[4728]: I1205 11:32:52.570013 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7a68b1f8-9521-44c8-8a8e-5bc26bc28047-logs\") pod \"nova-metadata-0\" (UID: \"7a68b1f8-9521-44c8-8a8e-5bc26bc28047\") " pod="openstack/nova-metadata-0"
Dec 05 11:32:52 crc kubenswrapper[4728]: I1205 11:32:52.570044 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lzsf2\" (UniqueName: \"kubernetes.io/projected/7a68b1f8-9521-44c8-8a8e-5bc26bc28047-kube-api-access-lzsf2\") pod \"nova-metadata-0\" (UID: \"7a68b1f8-9521-44c8-8a8e-5bc26bc28047\") " pod="openstack/nova-metadata-0"
Dec 05 11:32:52 crc kubenswrapper[4728]: I1205 11:32:52.570098 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/7a68b1f8-9521-44c8-8a8e-5bc26bc28047-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"7a68b1f8-9521-44c8-8a8e-5bc26bc28047\") " pod="openstack/nova-metadata-0"
Dec 05 11:32:52 crc kubenswrapper[4728]: I1205 11:32:52.570132 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a68b1f8-9521-44c8-8a8e-5bc26bc28047-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"7a68b1f8-9521-44c8-8a8e-5bc26bc28047\") " pod="openstack/nova-metadata-0"
Dec 05 11:32:52 crc kubenswrapper[4728]: I1205 11:32:52.571021 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7a68b1f8-9521-44c8-8a8e-5bc26bc28047-logs\") pod \"nova-metadata-0\" (UID: \"7a68b1f8-9521-44c8-8a8e-5bc26bc28047\") " pod="openstack/nova-metadata-0"
Dec 05 11:32:52 crc kubenswrapper[4728]: I1205 11:32:52.576097 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a68b1f8-9521-44c8-8a8e-5bc26bc28047-config-data\") pod \"nova-metadata-0\" (UID: \"7a68b1f8-9521-44c8-8a8e-5bc26bc28047\") " pod="openstack/nova-metadata-0"
Dec 05 11:32:52 crc kubenswrapper[4728]: I1205 11:32:52.576170 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a68b1f8-9521-44c8-8a8e-5bc26bc28047-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"7a68b1f8-9521-44c8-8a8e-5bc26bc28047\") " pod="openstack/nova-metadata-0"
Dec 05 11:32:52 crc kubenswrapper[4728]: I1205 11:32:52.576194 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/7a68b1f8-9521-44c8-8a8e-5bc26bc28047-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"7a68b1f8-9521-44c8-8a8e-5bc26bc28047\") " pod="openstack/nova-metadata-0"
Dec 05 11:32:52 crc kubenswrapper[4728]: I1205 11:32:52.587582 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lzsf2\" (UniqueName: \"kubernetes.io/projected/7a68b1f8-9521-44c8-8a8e-5bc26bc28047-kube-api-access-lzsf2\") pod \"nova-metadata-0\" (UID: \"7a68b1f8-9521-44c8-8a8e-5bc26bc28047\") " pod="openstack/nova-metadata-0"
Dec 05 11:32:52 crc kubenswrapper[4728]: I1205 11:32:52.724515 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 05 11:32:53 crc kubenswrapper[4728]: W1205 11:32:53.233857 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7a68b1f8_9521_44c8_8a8e_5bc26bc28047.slice/crio-03f90991bf96f8bb5ca6aceb49eff16fb6425294e9a86149eec434db8d0ca391 WatchSource:0}: Error finding container 03f90991bf96f8bb5ca6aceb49eff16fb6425294e9a86149eec434db8d0ca391: Status 404 returned error can't find the container with id 03f90991bf96f8bb5ca6aceb49eff16fb6425294e9a86149eec434db8d0ca391
Dec 05 11:32:53 crc kubenswrapper[4728]: I1205 11:32:53.239447 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Dec 05 11:32:53 crc kubenswrapper[4728]: I1205 11:32:53.311631 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"aa4cedf1-fd8d-4339-8569-f105adb2ca1a","Type":"ContainerStarted","Data":"0f014c2578c6b3bfb85efb0c91c56fefea6a613e333d677bb494499a030558a2"}
Dec 05 11:32:53 crc kubenswrapper[4728]: I1205 11:32:53.314615 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7a68b1f8-9521-44c8-8a8e-5bc26bc28047","Type":"ContainerStarted","Data":"03f90991bf96f8bb5ca6aceb49eff16fb6425294e9a86149eec434db8d0ca391"}
Dec 05 11:32:53 crc kubenswrapper[4728]: I1205 11:32:53.338244 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.33822077 podStartE2EDuration="2.33822077s" podCreationTimestamp="2025-12-05 11:32:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:32:53.327009465 +0000 UTC m=+1507.469132158" watchObservedRunningTime="2025-12-05 11:32:53.33822077 +0000 UTC m=+1507.480343473"
Dec 05 11:32:54 crc kubenswrapper[4728]: I1205 11:32:54.325820 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7a68b1f8-9521-44c8-8a8e-5bc26bc28047","Type":"ContainerStarted","Data":"eb190a833cb14d318651cf97d2ad7d4186a4feaab783d324ace11d31f624cbfc"}
Dec 05 11:32:54 crc kubenswrapper[4728]: I1205 11:32:54.326139 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7a68b1f8-9521-44c8-8a8e-5bc26bc28047","Type":"ContainerStarted","Data":"ef42378cf8d756633d5fcc6ac0c834106e1b90581de60570359455185f346547"}
Dec 05 11:32:54 crc kubenswrapper[4728]: I1205 11:32:54.365974 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.365913619 podStartE2EDuration="2.365913619s" podCreationTimestamp="2025-12-05 11:32:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:32:54.348392735 +0000 UTC m=+1508.490515458" watchObservedRunningTime="2025-12-05 11:32:54.365913619 +0000 UTC m=+1508.508036312"
Dec 05 11:32:54 crc kubenswrapper[4728]: I1205 11:32:54.407226 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2bc84149-614a-4651-8686-eef191b3d230" path="/var/lib/kubelet/pods/2bc84149-614a-4651-8686-eef191b3d230/volumes"
Dec 05 11:32:56 crc kubenswrapper[4728]: I1205 11:32:56.728157 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Dec 05 11:32:57 crc kubenswrapper[4728]: I1205 11:32:57.725505 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Dec 05 11:32:57 crc kubenswrapper[4728]: I1205 11:32:57.725891 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Dec 05 11:32:59 crc kubenswrapper[4728]: I1205 11:32:59.983986 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Dec 05 11:32:59 crc kubenswrapper[4728]: I1205 11:32:59.985221 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Dec 05 11:33:00 crc kubenswrapper[4728]: I1205 11:33:00.997095 4728 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="e7183619-beae-465b-86cf-ccbb710d4ac8" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.221:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Dec 05 11:33:00 crc kubenswrapper[4728]: I1205 11:33:00.997119 4728 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="e7183619-beae-465b-86cf-ccbb710d4ac8" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.221:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Dec 05 11:33:01 crc kubenswrapper[4728]: I1205 11:33:01.727929 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0"
Dec 05 11:33:01 crc kubenswrapper[4728]: I1205 11:33:01.765375 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0"
Dec 05 11:33:02 crc kubenswrapper[4728]: I1205 11:33:02.454474 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0"
Dec 05 11:33:02 crc kubenswrapper[4728]: I1205 11:33:02.725087 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Dec 05 11:33:02 crc kubenswrapper[4728]: I1205 11:33:02.725177 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Dec 05 11:33:03 crc kubenswrapper[4728]: I1205 11:33:03.737574 4728 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="7a68b1f8-9521-44c8-8a8e-5bc26bc28047" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.223:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Dec 05 11:33:03 crc kubenswrapper[4728]: I1205 11:33:03.737680 4728 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="7a68b1f8-9521-44c8-8a8e-5bc26bc28047" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.223:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Dec 05 11:33:04 crc kubenswrapper[4728]: I1205 11:33:04.231640 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-985t8"]
Dec 05 11:33:04 crc kubenswrapper[4728]: I1205 11:33:04.236216 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-985t8"
Dec 05 11:33:04 crc kubenswrapper[4728]: I1205 11:33:04.257934 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-985t8"]
Dec 05 11:33:04 crc kubenswrapper[4728]: I1205 11:33:04.336090 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1f0aeac-c794-4c5a-a4ed-1140183164cd-catalog-content\") pod \"redhat-operators-985t8\" (UID: \"c1f0aeac-c794-4c5a-a4ed-1140183164cd\") " pod="openshift-marketplace/redhat-operators-985t8"
Dec 05 11:33:04 crc kubenswrapper[4728]: I1205 11:33:04.336463 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v22kz\" (UniqueName: \"kubernetes.io/projected/c1f0aeac-c794-4c5a-a4ed-1140183164cd-kube-api-access-v22kz\") pod \"redhat-operators-985t8\" (UID: \"c1f0aeac-c794-4c5a-a4ed-1140183164cd\") " pod="openshift-marketplace/redhat-operators-985t8"
Dec 05 11:33:04 crc kubenswrapper[4728]: I1205 11:33:04.336486 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1f0aeac-c794-4c5a-a4ed-1140183164cd-utilities\") pod \"redhat-operators-985t8\" (UID: \"c1f0aeac-c794-4c5a-a4ed-1140183164cd\") " pod="openshift-marketplace/redhat-operators-985t8"
Dec 05 11:33:04 crc kubenswrapper[4728]: I1205 11:33:04.438401 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1f0aeac-c794-4c5a-a4ed-1140183164cd-utilities\") pod \"redhat-operators-985t8\" (UID: \"c1f0aeac-c794-4c5a-a4ed-1140183164cd\") " pod="openshift-marketplace/redhat-operators-985t8"
Dec 05 11:33:04 crc kubenswrapper[4728]: I1205 11:33:04.438471 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v22kz\" (UniqueName: \"kubernetes.io/projected/c1f0aeac-c794-4c5a-a4ed-1140183164cd-kube-api-access-v22kz\") pod \"redhat-operators-985t8\" (UID: \"c1f0aeac-c794-4c5a-a4ed-1140183164cd\") " pod="openshift-marketplace/redhat-operators-985t8"
Dec 05 11:33:04 crc kubenswrapper[4728]: I1205 11:33:04.438821 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1f0aeac-c794-4c5a-a4ed-1140183164cd-catalog-content\") pod \"redhat-operators-985t8\" (UID: \"c1f0aeac-c794-4c5a-a4ed-1140183164cd\") " pod="openshift-marketplace/redhat-operators-985t8"
Dec 05 11:33:04 crc kubenswrapper[4728]: I1205 11:33:04.439289 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1f0aeac-c794-4c5a-a4ed-1140183164cd-catalog-content\") pod \"redhat-operators-985t8\" (UID: \"c1f0aeac-c794-4c5a-a4ed-1140183164cd\") " pod="openshift-marketplace/redhat-operators-985t8"
Dec 05 11:33:04 crc kubenswrapper[4728]: I1205 11:33:04.439668 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1f0aeac-c794-4c5a-a4ed-1140183164cd-utilities\") pod \"redhat-operators-985t8\" (UID: \"c1f0aeac-c794-4c5a-a4ed-1140183164cd\") " pod="openshift-marketplace/redhat-operators-985t8"
Dec 05 11:33:04 crc kubenswrapper[4728]: I1205 11:33:04.465350 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v22kz\" (UniqueName: \"kubernetes.io/projected/c1f0aeac-c794-4c5a-a4ed-1140183164cd-kube-api-access-v22kz\") pod \"redhat-operators-985t8\" (UID: \"c1f0aeac-c794-4c5a-a4ed-1140183164cd\") " pod="openshift-marketplace/redhat-operators-985t8"
Dec 05 11:33:04 crc kubenswrapper[4728]: I1205 11:33:04.583672 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-985t8"
Dec 05 11:33:05 crc kubenswrapper[4728]: I1205 11:33:05.031258 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-985t8"]
Dec 05 11:33:05 crc kubenswrapper[4728]: I1205 11:33:05.453461 4728 generic.go:334] "Generic (PLEG): container finished" podID="c1f0aeac-c794-4c5a-a4ed-1140183164cd" containerID="fe4e453b6a49dc2c8586d6337be99ae6e306380ab2eef0015f19933b553e871f" exitCode=0
Dec 05 11:33:05 crc kubenswrapper[4728]: I1205 11:33:05.453499 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-985t8" event={"ID":"c1f0aeac-c794-4c5a-a4ed-1140183164cd","Type":"ContainerDied","Data":"fe4e453b6a49dc2c8586d6337be99ae6e306380ab2eef0015f19933b553e871f"}
Dec 05 11:33:05 crc kubenswrapper[4728]: I1205 11:33:05.453711 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-985t8" event={"ID":"c1f0aeac-c794-4c5a-a4ed-1140183164cd","Type":"ContainerStarted","Data":"bed31632b0b6c79e276067885539a490a0b8d1fa90343c658911e74ca439e27e"}
Dec 05 11:33:07 crc kubenswrapper[4728]: I1205 11:33:07.475756 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-985t8" event={"ID":"c1f0aeac-c794-4c5a-a4ed-1140183164cd","Type":"ContainerStarted","Data":"558087dbeaf2928014f5974651e063433a36d50372a36ca050e20023f872d4e0"}
Dec 05 11:33:08 crc kubenswrapper[4728]: I1205 11:33:08.566308 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0"
Dec 05 11:33:09 crc kubenswrapper[4728]: I1205 11:33:09.498809 4728 generic.go:334] "Generic (PLEG): container finished" podID="c1f0aeac-c794-4c5a-a4ed-1140183164cd" containerID="558087dbeaf2928014f5974651e063433a36d50372a36ca050e20023f872d4e0" exitCode=0
Dec 05 11:33:09 crc kubenswrapper[4728]: I1205 11:33:09.498836 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-985t8" event={"ID":"c1f0aeac-c794-4c5a-a4ed-1140183164cd","Type":"ContainerDied","Data":"558087dbeaf2928014f5974651e063433a36d50372a36ca050e20023f872d4e0"}
Dec 05 11:33:09 crc kubenswrapper[4728]: I1205 11:33:09.993694 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Dec 05 11:33:09 crc kubenswrapper[4728]: I1205 11:33:09.995086 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Dec 05 11:33:09 crc kubenswrapper[4728]: I1205 11:33:09.995306 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Dec 05 11:33:10 crc kubenswrapper[4728]: I1205 11:33:10.004556 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Dec 05 11:33:10 crc kubenswrapper[4728]: I1205 11:33:10.509921 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-985t8" event={"ID":"c1f0aeac-c794-4c5a-a4ed-1140183164cd","Type":"ContainerStarted","Data":"680354dafa3f1fc274a3bea2899ecb5530eddfbd4e8a4ebea8db063d5c7fa490"}
Dec 05 11:33:10 crc kubenswrapper[4728]: I1205 11:33:10.510425 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Dec 05 11:33:10 crc kubenswrapper[4728]: I1205 11:33:10.523698 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Dec 05 11:33:10 crc kubenswrapper[4728]: I1205 11:33:10.537646 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-985t8" podStartSLOduration=2.087998365 podStartE2EDuration="6.537629358s" podCreationTimestamp="2025-12-05 11:33:04 +0000 UTC" firstStartedPulling="2025-12-05 11:33:05.455353779 +0000 UTC m=+1519.597476472" lastFinishedPulling="2025-12-05 11:33:09.904984772 +0000 UTC m=+1524.047107465" observedRunningTime="2025-12-05 11:33:10.525254224 +0000 UTC m=+1524.667376927" watchObservedRunningTime="2025-12-05 11:33:10.537629358 +0000 UTC m=+1524.679752051"
Dec 05 11:33:12 crc kubenswrapper[4728]: I1205 11:33:12.732707 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Dec 05 11:33:12 crc kubenswrapper[4728]: I1205 11:33:12.735085 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Dec 05 11:33:12 crc kubenswrapper[4728]: I1205 11:33:12.744632 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Dec 05 11:33:13 crc kubenswrapper[4728]: I1205 11:33:13.557491 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Dec 05 11:33:14 crc kubenswrapper[4728]: I1205 11:33:14.585417 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-985t8"
Dec 05 11:33:14 crc kubenswrapper[4728]: I1205 11:33:14.585468 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-985t8"
Dec 05 11:33:15 crc kubenswrapper[4728]: I1205 11:33:15.640581 4728 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-985t8" podUID="c1f0aeac-c794-4c5a-a4ed-1140183164cd" containerName="registry-server" probeResult="failure" output=<
Dec 05 11:33:15 crc kubenswrapper[4728]: timeout: failed to connect service ":50051" within 1s
Dec 05 11:33:15 crc kubenswrapper[4728]: >
Dec 05 11:33:22 crc kubenswrapper[4728]: I1205 11:33:22.532967 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"]
Dec 05 11:33:23 crc kubenswrapper[4728]: I1205 11:33:23.226704 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-6mvjw"]
Dec 05 11:33:23 crc kubenswrapper[4728]: I1205 11:33:23.229026 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6mvjw"
Dec 05 11:33:23 crc kubenswrapper[4728]: I1205 11:33:23.248762 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6mvjw"]
Dec 05 11:33:23 crc kubenswrapper[4728]: I1205 11:33:23.275746 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fjlkm\" (UniqueName: \"kubernetes.io/projected/e89cf8d0-efb6-4c10-8ffd-061fc03079be-kube-api-access-fjlkm\") pod \"community-operators-6mvjw\" (UID: \"e89cf8d0-efb6-4c10-8ffd-061fc03079be\") " pod="openshift-marketplace/community-operators-6mvjw"
Dec 05 11:33:23 crc kubenswrapper[4728]: I1205 11:33:23.275843 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e89cf8d0-efb6-4c10-8ffd-061fc03079be-utilities\") pod \"community-operators-6mvjw\" (UID: \"e89cf8d0-efb6-4c10-8ffd-061fc03079be\") " pod="openshift-marketplace/community-operators-6mvjw"
Dec 05 11:33:23 crc kubenswrapper[4728]: I1205 11:33:23.276082 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e89cf8d0-efb6-4c10-8ffd-061fc03079be-catalog-content\") pod \"community-operators-6mvjw\" (UID: \"e89cf8d0-efb6-4c10-8ffd-061fc03079be\") " pod="openshift-marketplace/community-operators-6mvjw"
Dec 05 11:33:23 crc kubenswrapper[4728]: I1205 11:33:23.377111 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e89cf8d0-efb6-4c10-8ffd-061fc03079be-catalog-content\") pod \"community-operators-6mvjw\" (UID: \"e89cf8d0-efb6-4c10-8ffd-061fc03079be\") " pod="openshift-marketplace/community-operators-6mvjw"
Dec 05 11:33:23 crc kubenswrapper[4728]: I1205 11:33:23.377189 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fjlkm\" (UniqueName: \"kubernetes.io/projected/e89cf8d0-efb6-4c10-8ffd-061fc03079be-kube-api-access-fjlkm\") pod \"community-operators-6mvjw\" (UID: \"e89cf8d0-efb6-4c10-8ffd-061fc03079be\") " pod="openshift-marketplace/community-operators-6mvjw"
Dec 05 11:33:23 crc kubenswrapper[4728]: I1205 11:33:23.377233 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e89cf8d0-efb6-4c10-8ffd-061fc03079be-utilities\") pod \"community-operators-6mvjw\" (UID: \"e89cf8d0-efb6-4c10-8ffd-061fc03079be\") " pod="openshift-marketplace/community-operators-6mvjw"
Dec 05 11:33:23 crc kubenswrapper[4728]: I1205 11:33:23.377750 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e89cf8d0-efb6-4c10-8ffd-061fc03079be-catalog-content\") pod \"community-operators-6mvjw\" (UID: \"e89cf8d0-efb6-4c10-8ffd-061fc03079be\") " pod="openshift-marketplace/community-operators-6mvjw"
Dec 05 11:33:23 crc kubenswrapper[4728]: I1205 11:33:23.377765 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e89cf8d0-efb6-4c10-8ffd-061fc03079be-utilities\") pod \"community-operators-6mvjw\" (UID: \"e89cf8d0-efb6-4c10-8ffd-061fc03079be\") " pod="openshift-marketplace/community-operators-6mvjw"
Dec 05 11:33:23 crc kubenswrapper[4728]: I1205 11:33:23.399766 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fjlkm\" (UniqueName: \"kubernetes.io/projected/e89cf8d0-efb6-4c10-8ffd-061fc03079be-kube-api-access-fjlkm\") pod \"community-operators-6mvjw\" (UID: \"e89cf8d0-efb6-4c10-8ffd-061fc03079be\") " pod="openshift-marketplace/community-operators-6mvjw"
Dec 05 11:33:23 crc kubenswrapper[4728]: I1205 11:33:23.504665 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Dec 05 11:33:23 crc kubenswrapper[4728]: I1205 11:33:23.558316 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6mvjw"
Dec 05 11:33:24 crc kubenswrapper[4728]: W1205 11:33:24.110787 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode89cf8d0_efb6_4c10_8ffd_061fc03079be.slice/crio-1b95da1c095f40795089c9c6cd1de5dddd2a1bcc67c2dfb03b2cbe3203c93681 WatchSource:0}: Error finding container 1b95da1c095f40795089c9c6cd1de5dddd2a1bcc67c2dfb03b2cbe3203c93681: Status 404 returned error can't find the container with id 1b95da1c095f40795089c9c6cd1de5dddd2a1bcc67c2dfb03b2cbe3203c93681
Dec 05 11:33:24 crc kubenswrapper[4728]: I1205 11:33:24.121564 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6mvjw"]
Dec 05 11:33:24 crc kubenswrapper[4728]: I1205 11:33:24.638976 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-985t8"
Dec 05 11:33:24 crc kubenswrapper[4728]: I1205 11:33:24.680379 4728 generic.go:334] "Generic (PLEG): container finished" podID="e89cf8d0-efb6-4c10-8ffd-061fc03079be" containerID="b261c902bd128e626e60e24dee865ac8ca524c8863ffcc85b48396535e95f522" exitCode=0
Dec 05 11:33:24 crc kubenswrapper[4728]: I1205 11:33:24.680421 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6mvjw" event={"ID":"e89cf8d0-efb6-4c10-8ffd-061fc03079be","Type":"ContainerDied","Data":"b261c902bd128e626e60e24dee865ac8ca524c8863ffcc85b48396535e95f522"}
Dec 05 11:33:24 crc kubenswrapper[4728]: I1205 11:33:24.680448 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6mvjw" event={"ID":"e89cf8d0-efb6-4c10-8ffd-061fc03079be","Type":"ContainerStarted","Data":"1b95da1c095f40795089c9c6cd1de5dddd2a1bcc67c2dfb03b2cbe3203c93681"}
Dec 05 11:33:24 crc kubenswrapper[4728]: I1205 11:33:24.706467 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-985t8"
Dec 05 11:33:25 crc kubenswrapper[4728]: I1205 11:33:25.737565 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6mvjw" event={"ID":"e89cf8d0-efb6-4c10-8ffd-061fc03079be","Type":"ContainerStarted","Data":"fc66a672c69d56fce435073ed72bd768b6aabf9cea2ba6a323adcd58db4428b4"}
Dec 05 11:33:26 crc kubenswrapper[4728]: I1205 11:33:26.748579 4728 generic.go:334] "Generic (PLEG): container finished" podID="e89cf8d0-efb6-4c10-8ffd-061fc03079be" containerID="fc66a672c69d56fce435073ed72bd768b6aabf9cea2ba6a323adcd58db4428b4" exitCode=0
Dec 05 11:33:26 crc kubenswrapper[4728]: I1205 11:33:26.748642 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6mvjw" event={"ID":"e89cf8d0-efb6-4c10-8ffd-061fc03079be","Type":"ContainerDied","Data":"fc66a672c69d56fce435073ed72bd768b6aabf9cea2ba6a323adcd58db4428b4"}
Dec 05 11:33:27 crc kubenswrapper[4728]: I1205 11:33:27.011343 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-985t8"]
Dec 05 11:33:27 crc kubenswrapper[4728]: I1205 11:33:27.011608 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-985t8" podUID="c1f0aeac-c794-4c5a-a4ed-1140183164cd" containerName="registry-server" containerID="cri-o://680354dafa3f1fc274a3bea2899ecb5530eddfbd4e8a4ebea8db063d5c7fa490" gracePeriod=2
Dec 05 11:33:27 crc kubenswrapper[4728]: I1205 11:33:27.075970 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="4b375e4c-1072-4e6b-be7b-4eea43abf413" containerName="rabbitmq" containerID="cri-o://021780e8c029d2fc151cf9af1b1f211f7020acbe296320997c1da9ab53130d77" gracePeriod=604796
Dec 05 11:33:27 crc kubenswrapper[4728]: I1205 11:33:27.577069 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-985t8"
Dec 05 11:33:27 crc kubenswrapper[4728]: I1205 11:33:27.759019 4728 generic.go:334] "Generic (PLEG): container finished" podID="c1f0aeac-c794-4c5a-a4ed-1140183164cd" containerID="680354dafa3f1fc274a3bea2899ecb5530eddfbd4e8a4ebea8db063d5c7fa490" exitCode=0
Dec 05 11:33:27 crc kubenswrapper[4728]: I1205 11:33:27.759084 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-985t8"
Dec 05 11:33:27 crc kubenswrapper[4728]: I1205 11:33:27.759086 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-985t8" event={"ID":"c1f0aeac-c794-4c5a-a4ed-1140183164cd","Type":"ContainerDied","Data":"680354dafa3f1fc274a3bea2899ecb5530eddfbd4e8a4ebea8db063d5c7fa490"}
Dec 05 11:33:27 crc kubenswrapper[4728]: I1205 11:33:27.759130 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-985t8" event={"ID":"c1f0aeac-c794-4c5a-a4ed-1140183164cd","Type":"ContainerDied","Data":"bed31632b0b6c79e276067885539a490a0b8d1fa90343c658911e74ca439e27e"}
Dec 05 11:33:27 crc kubenswrapper[4728]: I1205 11:33:27.759151 4728 scope.go:117] "RemoveContainer" containerID="680354dafa3f1fc274a3bea2899ecb5530eddfbd4e8a4ebea8db063d5c7fa490"
Dec 05 11:33:27 crc kubenswrapper[4728]: I1205 11:33:27.760852 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v22kz\" (UniqueName: \"kubernetes.io/projected/c1f0aeac-c794-4c5a-a4ed-1140183164cd-kube-api-access-v22kz\") pod \"c1f0aeac-c794-4c5a-a4ed-1140183164cd\" (UID: \"c1f0aeac-c794-4c5a-a4ed-1140183164cd\") "
Dec 05 11:33:27 crc kubenswrapper[4728]: I1205 11:33:27.760910 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1f0aeac-c794-4c5a-a4ed-1140183164cd-utilities\") pod \"c1f0aeac-c794-4c5a-a4ed-1140183164cd\" (UID: \"c1f0aeac-c794-4c5a-a4ed-1140183164cd\") "
Dec 05 11:33:27 crc kubenswrapper[4728]: I1205 11:33:27.761041 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1f0aeac-c794-4c5a-a4ed-1140183164cd-catalog-content\") pod \"c1f0aeac-c794-4c5a-a4ed-1140183164cd\" (UID: \"c1f0aeac-c794-4c5a-a4ed-1140183164cd\") "
Dec 05 11:33:27 crc kubenswrapper[4728]: I1205 11:33:27.762042 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c1f0aeac-c794-4c5a-a4ed-1140183164cd-utilities" (OuterVolumeSpecName: "utilities") pod "c1f0aeac-c794-4c5a-a4ed-1140183164cd" (UID: "c1f0aeac-c794-4c5a-a4ed-1140183164cd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 11:33:27 crc kubenswrapper[4728]: I1205 11:33:27.764929 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6mvjw" event={"ID":"e89cf8d0-efb6-4c10-8ffd-061fc03079be","Type":"ContainerStarted","Data":"32c10a59f60d3b7df7ab15975b0c712648987bd38a8e006a287689499536aec6"}
Dec 05 11:33:27 crc kubenswrapper[4728]: I1205 11:33:27.770869 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c1f0aeac-c794-4c5a-a4ed-1140183164cd-kube-api-access-v22kz" (OuterVolumeSpecName: "kube-api-access-v22kz") pod "c1f0aeac-c794-4c5a-a4ed-1140183164cd" (UID: "c1f0aeac-c794-4c5a-a4ed-1140183164cd"). InnerVolumeSpecName "kube-api-access-v22kz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:33:27 crc kubenswrapper[4728]: I1205 11:33:27.787783 4728 scope.go:117] "RemoveContainer" containerID="558087dbeaf2928014f5974651e063433a36d50372a36ca050e20023f872d4e0"
Dec 05 11:33:27 crc kubenswrapper[4728]: I1205 11:33:27.798057 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-6mvjw" podStartSLOduration=2.346832186 podStartE2EDuration="4.798038376s" podCreationTimestamp="2025-12-05 11:33:23 +0000 UTC" firstStartedPulling="2025-12-05 11:33:24.682935188 +0000 UTC m=+1538.825057871" lastFinishedPulling="2025-12-05 11:33:27.134141368 +0000 UTC m=+1541.276264061" observedRunningTime="2025-12-05 11:33:27.792710591 +0000 UTC m=+1541.934833284" watchObservedRunningTime="2025-12-05 11:33:27.798038376 +0000 UTC m=+1541.940161069"
Dec 05 11:33:27 crc kubenswrapper[4728]: I1205 11:33:27.853773 4728 scope.go:117] "RemoveContainer" containerID="fe4e453b6a49dc2c8586d6337be99ae6e306380ab2eef0015f19933b553e871f"
Dec 05 11:33:27 crc kubenswrapper[4728]: I1205 11:33:27.863234 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v22kz\" (UniqueName: \"kubernetes.io/projected/c1f0aeac-c794-4c5a-a4ed-1140183164cd-kube-api-access-v22kz\") on node \"crc\" DevicePath \"\""
Dec 05 11:33:27 crc kubenswrapper[4728]: I1205 11:33:27.863487 4728 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1f0aeac-c794-4c5a-a4ed-1140183164cd-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 11:33:27 crc kubenswrapper[4728]: I1205 11:33:27.871467 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="1ffe010f-366a-454a-a8a3-639ed1cf0fdc" containerName="rabbitmq" containerID="cri-o://8696f921f7cda96413571926919ec77e1d181e158bf83564c780b49fbebb8859" gracePeriod=604796
Dec 05 11:33:27 crc kubenswrapper[4728]: I1205 11:33:27.873923 4728 scope.go:117] "RemoveContainer" containerID="680354dafa3f1fc274a3bea2899ecb5530eddfbd4e8a4ebea8db063d5c7fa490"
Dec 05 11:33:27 crc kubenswrapper[4728]: E1205 11:33:27.874265 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"680354dafa3f1fc274a3bea2899ecb5530eddfbd4e8a4ebea8db063d5c7fa490\": container with ID starting with 680354dafa3f1fc274a3bea2899ecb5530eddfbd4e8a4ebea8db063d5c7fa490 not found: ID does not exist" containerID="680354dafa3f1fc274a3bea2899ecb5530eddfbd4e8a4ebea8db063d5c7fa490"
Dec 05 11:33:27 crc kubenswrapper[4728]: I1205 11:33:27.874293 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"680354dafa3f1fc274a3bea2899ecb5530eddfbd4e8a4ebea8db063d5c7fa490"} err="failed to get container status \"680354dafa3f1fc274a3bea2899ecb5530eddfbd4e8a4ebea8db063d5c7fa490\": rpc error: code = NotFound desc = could not find container \"680354dafa3f1fc274a3bea2899ecb5530eddfbd4e8a4ebea8db063d5c7fa490\": container with ID starting with 680354dafa3f1fc274a3bea2899ecb5530eddfbd4e8a4ebea8db063d5c7fa490 not found: ID does not exist"
Dec 05 11:33:27 crc kubenswrapper[4728]: I1205 11:33:27.874313 4728 scope.go:117] "RemoveContainer" containerID="558087dbeaf2928014f5974651e063433a36d50372a36ca050e20023f872d4e0"
Dec 05 11:33:27 crc kubenswrapper[4728]: E1205 11:33:27.874682 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"558087dbeaf2928014f5974651e063433a36d50372a36ca050e20023f872d4e0\": container with ID starting with 558087dbeaf2928014f5974651e063433a36d50372a36ca050e20023f872d4e0 not found: ID does not exist" containerID="558087dbeaf2928014f5974651e063433a36d50372a36ca050e20023f872d4e0"
Dec 05 11:33:27 crc kubenswrapper[4728]: I1205 11:33:27.874723 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"558087dbeaf2928014f5974651e063433a36d50372a36ca050e20023f872d4e0"} err="failed to get container status \"558087dbeaf2928014f5974651e063433a36d50372a36ca050e20023f872d4e0\": rpc error: code = NotFound desc = could not find container \"558087dbeaf2928014f5974651e063433a36d50372a36ca050e20023f872d4e0\": container with ID starting with 558087dbeaf2928014f5974651e063433a36d50372a36ca050e20023f872d4e0 not found: ID does not exist"
Dec 05 11:33:27 crc kubenswrapper[4728]: I1205 11:33:27.874737 4728 scope.go:117] "RemoveContainer" containerID="fe4e453b6a49dc2c8586d6337be99ae6e306380ab2eef0015f19933b553e871f"
Dec 05 11:33:27 crc kubenswrapper[4728]: E1205 11:33:27.875077 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fe4e453b6a49dc2c8586d6337be99ae6e306380ab2eef0015f19933b553e871f\": container with ID starting with fe4e453b6a49dc2c8586d6337be99ae6e306380ab2eef0015f19933b553e871f not found: ID does not exist" containerID="fe4e453b6a49dc2c8586d6337be99ae6e306380ab2eef0015f19933b553e871f"
Dec 05 11:33:27 crc kubenswrapper[4728]: I1205 11:33:27.875107 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe4e453b6a49dc2c8586d6337be99ae6e306380ab2eef0015f19933b553e871f"} err="failed to get container status \"fe4e453b6a49dc2c8586d6337be99ae6e306380ab2eef0015f19933b553e871f\": rpc error: code = NotFound desc = could not find container \"fe4e453b6a49dc2c8586d6337be99ae6e306380ab2eef0015f19933b553e871f\": container with ID starting with fe4e453b6a49dc2c8586d6337be99ae6e306380ab2eef0015f19933b553e871f not found: ID does not exist"
Dec 05 11:33:27 crc kubenswrapper[4728]: I1205 11:33:27.899245 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c1f0aeac-c794-4c5a-a4ed-1140183164cd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c1f0aeac-c794-4c5a-a4ed-1140183164cd" (UID: "c1f0aeac-c794-4c5a-a4ed-1140183164cd"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 11:33:27 crc kubenswrapper[4728]: I1205 11:33:27.965467 4728 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1f0aeac-c794-4c5a-a4ed-1140183164cd-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 11:33:28 crc kubenswrapper[4728]: I1205 11:33:28.089217 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-985t8"]
Dec 05 11:33:28 crc kubenswrapper[4728]: I1205 11:33:28.097410 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-985t8"]
Dec 05 11:33:28 crc kubenswrapper[4728]: I1205 11:33:28.378503 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c1f0aeac-c794-4c5a-a4ed-1140183164cd" path="/var/lib/kubelet/pods/c1f0aeac-c794-4c5a-a4ed-1140183164cd/volumes"
Dec 05 11:33:29 crc kubenswrapper[4728]: I1205 11:33:29.071468 4728 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="4b375e4c-1072-4e6b-be7b-4eea43abf413" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.98:5671: connect: connection refused"
Dec 05 11:33:29 crc kubenswrapper[4728]: I1205 11:33:29.342688 4728 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="1ffe010f-366a-454a-a8a3-639ed1cf0fdc" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.99:5671: connect: connection refused"
Dec 05 11:33:33 crc kubenswrapper[4728]: I1205 11:33:33.559470 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-6mvjw"
Dec 05 11:33:33 crc kubenswrapper[4728]: I1205 11:33:33.560183 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-6mvjw"
Dec 05 11:33:33 crc kubenswrapper[4728]: I1205 11:33:33.626592 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-6mvjw"
Dec 05 11:33:33 crc kubenswrapper[4728]: I1205 11:33:33.764140 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Dec 05 11:33:33 crc kubenswrapper[4728]: I1205 11:33:33.825132 4728 generic.go:334] "Generic (PLEG): container finished" podID="4b375e4c-1072-4e6b-be7b-4eea43abf413" containerID="021780e8c029d2fc151cf9af1b1f211f7020acbe296320997c1da9ab53130d77" exitCode=0
Dec 05 11:33:33 crc kubenswrapper[4728]: I1205 11:33:33.825183 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"4b375e4c-1072-4e6b-be7b-4eea43abf413","Type":"ContainerDied","Data":"021780e8c029d2fc151cf9af1b1f211f7020acbe296320997c1da9ab53130d77"}
Dec 05 11:33:33 crc kubenswrapper[4728]: I1205 11:33:33.825235 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"4b375e4c-1072-4e6b-be7b-4eea43abf413","Type":"ContainerDied","Data":"f8e2896bb32c40a66c0616539bdd6169fe38a2e356dd764500d48f60b601e90b"}
Dec 05 11:33:33 crc kubenswrapper[4728]: I1205 11:33:33.825232 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Dec 05 11:33:33 crc kubenswrapper[4728]: I1205 11:33:33.825282 4728 scope.go:117] "RemoveContainer" containerID="021780e8c029d2fc151cf9af1b1f211f7020acbe296320997c1da9ab53130d77"
Dec 05 11:33:33 crc kubenswrapper[4728]: I1205 11:33:33.853654 4728 scope.go:117] "RemoveContainer" containerID="8adfc871f6cc2dd8a3d042f60b6adbb6306b6762f75c01fed28e659895a3180e"
Dec 05 11:33:33 crc kubenswrapper[4728]: I1205 11:33:33.892832 4728 scope.go:117] "RemoveContainer" containerID="021780e8c029d2fc151cf9af1b1f211f7020acbe296320997c1da9ab53130d77"
Dec 05 11:33:33 crc kubenswrapper[4728]: E1205 11:33:33.893309 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"021780e8c029d2fc151cf9af1b1f211f7020acbe296320997c1da9ab53130d77\": container with ID starting with 021780e8c029d2fc151cf9af1b1f211f7020acbe296320997c1da9ab53130d77 not found: ID does not exist" containerID="021780e8c029d2fc151cf9af1b1f211f7020acbe296320997c1da9ab53130d77"
Dec 05 11:33:33 crc kubenswrapper[4728]: I1205 11:33:33.893357 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"021780e8c029d2fc151cf9af1b1f211f7020acbe296320997c1da9ab53130d77"} err="failed to get container status \"021780e8c029d2fc151cf9af1b1f211f7020acbe296320997c1da9ab53130d77\": rpc error: code = NotFound desc = could not find container \"021780e8c029d2fc151cf9af1b1f211f7020acbe296320997c1da9ab53130d77\": container with ID starting with 021780e8c029d2fc151cf9af1b1f211f7020acbe296320997c1da9ab53130d77 not found: ID does not exist"
Dec 05 11:33:33 crc kubenswrapper[4728]: I1205 11:33:33.893384 4728 scope.go:117] "RemoveContainer" containerID="8adfc871f6cc2dd8a3d042f60b6adbb6306b6762f75c01fed28e659895a3180e"
Dec 05 11:33:33 crc kubenswrapper[4728]: I1205 11:33:33.893443 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4b375e4c-1072-4e6b-be7b-4eea43abf413-config-data\") pod \"4b375e4c-1072-4e6b-be7b-4eea43abf413\" (UID: \"4b375e4c-1072-4e6b-be7b-4eea43abf413\") "
Dec 05 11:33:33 crc kubenswrapper[4728]: I1205 11:33:33.893484 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4b375e4c-1072-4e6b-be7b-4eea43abf413-rabbitmq-confd\") pod \"4b375e4c-1072-4e6b-be7b-4eea43abf413\" (UID: \"4b375e4c-1072-4e6b-be7b-4eea43abf413\") "
Dec 05 11:33:33 crc kubenswrapper[4728]: I1205 11:33:33.893521 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4b375e4c-1072-4e6b-be7b-4eea43abf413-pod-info\") pod \"4b375e4c-1072-4e6b-be7b-4eea43abf413\" (UID: \"4b375e4c-1072-4e6b-be7b-4eea43abf413\") "
Dec 05 11:33:33 crc kubenswrapper[4728]: I1205 11:33:33.893586 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4b375e4c-1072-4e6b-be7b-4eea43abf413-server-conf\") pod \"4b375e4c-1072-4e6b-be7b-4eea43abf413\" (UID: \"4b375e4c-1072-4e6b-be7b-4eea43abf413\") "
Dec 05 11:33:33 crc kubenswrapper[4728]: I1205 11:33:33.893628 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4b375e4c-1072-4e6b-be7b-4eea43abf413-erlang-cookie-secret\") pod \"4b375e4c-1072-4e6b-be7b-4eea43abf413\" (UID: \"4b375e4c-1072-4e6b-be7b-4eea43abf413\") "
Dec 05 11:33:33 crc kubenswrapper[4728]: I1205 11:33:33.893672 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4b375e4c-1072-4e6b-be7b-4eea43abf413-rabbitmq-erlang-cookie\") pod \"4b375e4c-1072-4e6b-be7b-4eea43abf413\" (UID: \"4b375e4c-1072-4e6b-be7b-4eea43abf413\") "
Dec 05 11:33:33 crc kubenswrapper[4728]: I1205 11:33:33.893719 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5spwk\" (UniqueName: \"kubernetes.io/projected/4b375e4c-1072-4e6b-be7b-4eea43abf413-kube-api-access-5spwk\") pod \"4b375e4c-1072-4e6b-be7b-4eea43abf413\" (UID: \"4b375e4c-1072-4e6b-be7b-4eea43abf413\") "
Dec 05 11:33:33 crc kubenswrapper[4728]: I1205 11:33:33.893821 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4b375e4c-1072-4e6b-be7b-4eea43abf413-plugins-conf\") pod \"4b375e4c-1072-4e6b-be7b-4eea43abf413\" (UID: \"4b375e4c-1072-4e6b-be7b-4eea43abf413\") "
Dec 05 11:33:33 crc kubenswrapper[4728]: I1205 11:33:33.893855 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4b375e4c-1072-4e6b-be7b-4eea43abf413-rabbitmq-plugins\") pod \"4b375e4c-1072-4e6b-be7b-4eea43abf413\" (UID: \"4b375e4c-1072-4e6b-be7b-4eea43abf413\") "
Dec 05 11:33:33 crc kubenswrapper[4728]: I1205 11:33:33.893912 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"4b375e4c-1072-4e6b-be7b-4eea43abf413\" (UID: \"4b375e4c-1072-4e6b-be7b-4eea43abf413\") "
Dec 05 11:33:33 crc kubenswrapper[4728]: I1205 11:33:33.893979 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/4b375e4c-1072-4e6b-be7b-4eea43abf413-rabbitmq-tls\") pod \"4b375e4c-1072-4e6b-be7b-4eea43abf413\" (UID: \"4b375e4c-1072-4e6b-be7b-4eea43abf413\") "
Dec 05 11:33:33 crc kubenswrapper[4728]: I1205 11:33:33.895021 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4b375e4c-1072-4e6b-be7b-4eea43abf413-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "4b375e4c-1072-4e6b-be7b-4eea43abf413" (UID: "4b375e4c-1072-4e6b-be7b-4eea43abf413"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 11:33:33 crc kubenswrapper[4728]: E1205 11:33:33.904870 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8adfc871f6cc2dd8a3d042f60b6adbb6306b6762f75c01fed28e659895a3180e\": container with ID starting with 8adfc871f6cc2dd8a3d042f60b6adbb6306b6762f75c01fed28e659895a3180e not found: ID does not exist" containerID="8adfc871f6cc2dd8a3d042f60b6adbb6306b6762f75c01fed28e659895a3180e"
Dec 05 11:33:33 crc kubenswrapper[4728]: I1205 11:33:33.904916 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8adfc871f6cc2dd8a3d042f60b6adbb6306b6762f75c01fed28e659895a3180e"} err="failed to get container status \"8adfc871f6cc2dd8a3d042f60b6adbb6306b6762f75c01fed28e659895a3180e\": rpc error: code = NotFound desc = could not find container \"8adfc871f6cc2dd8a3d042f60b6adbb6306b6762f75c01fed28e659895a3180e\": container with ID starting with 8adfc871f6cc2dd8a3d042f60b6adbb6306b6762f75c01fed28e659895a3180e not found: ID does not exist"
Dec 05 11:33:33 crc kubenswrapper[4728]: I1205 11:33:33.905566 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4b375e4c-1072-4e6b-be7b-4eea43abf413-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "4b375e4c-1072-4e6b-be7b-4eea43abf413" (UID: "4b375e4c-1072-4e6b-be7b-4eea43abf413"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:33:33 crc kubenswrapper[4728]: I1205 11:33:33.908608 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4b375e4c-1072-4e6b-be7b-4eea43abf413-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "4b375e4c-1072-4e6b-be7b-4eea43abf413" (UID: "4b375e4c-1072-4e6b-be7b-4eea43abf413"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:33:33 crc kubenswrapper[4728]: I1205 11:33:33.909321 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4b375e4c-1072-4e6b-be7b-4eea43abf413-kube-api-access-5spwk" (OuterVolumeSpecName: "kube-api-access-5spwk") pod "4b375e4c-1072-4e6b-be7b-4eea43abf413" (UID: "4b375e4c-1072-4e6b-be7b-4eea43abf413"). InnerVolumeSpecName "kube-api-access-5spwk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:33:33 crc kubenswrapper[4728]: I1205 11:33:33.912435 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/4b375e4c-1072-4e6b-be7b-4eea43abf413-pod-info" (OuterVolumeSpecName: "pod-info") pod "4b375e4c-1072-4e6b-be7b-4eea43abf413" (UID: "4b375e4c-1072-4e6b-be7b-4eea43abf413"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue ""
Dec 05 11:33:33 crc kubenswrapper[4728]: I1205 11:33:33.915551 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4b375e4c-1072-4e6b-be7b-4eea43abf413-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "4b375e4c-1072-4e6b-be7b-4eea43abf413" (UID: "4b375e4c-1072-4e6b-be7b-4eea43abf413"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 11:33:33 crc kubenswrapper[4728]: I1205 11:33:33.922337 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b375e4c-1072-4e6b-be7b-4eea43abf413-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "4b375e4c-1072-4e6b-be7b-4eea43abf413" (UID: "4b375e4c-1072-4e6b-be7b-4eea43abf413"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:33:33 crc kubenswrapper[4728]: I1205 11:33:33.922462 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "persistence") pod "4b375e4c-1072-4e6b-be7b-4eea43abf413" (UID: "4b375e4c-1072-4e6b-be7b-4eea43abf413"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Dec 05 11:33:33 crc kubenswrapper[4728]: I1205 11:33:33.928952 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-6mvjw"
Dec 05 11:33:33 crc kubenswrapper[4728]: I1205 11:33:33.988085 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4b375e4c-1072-4e6b-be7b-4eea43abf413-config-data" (OuterVolumeSpecName: "config-data") pod "4b375e4c-1072-4e6b-be7b-4eea43abf413" (UID: "4b375e4c-1072-4e6b-be7b-4eea43abf413"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:33:33 crc kubenswrapper[4728]: I1205 11:33:33.996070 4728 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4b375e4c-1072-4e6b-be7b-4eea43abf413-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 11:33:33 crc kubenswrapper[4728]: I1205 11:33:33.996102 4728 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4b375e4c-1072-4e6b-be7b-4eea43abf413-pod-info\") on node \"crc\" DevicePath \"\""
Dec 05 11:33:33 crc kubenswrapper[4728]: I1205 11:33:33.996114 4728 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4b375e4c-1072-4e6b-be7b-4eea43abf413-erlang-cookie-secret\") on node \"crc\" DevicePath \"\""
Dec 05 11:33:33 crc kubenswrapper[4728]: I1205 11:33:33.996125 4728 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4b375e4c-1072-4e6b-be7b-4eea43abf413-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\""
Dec 05 11:33:33 crc kubenswrapper[4728]: I1205 11:33:33.996136 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5spwk\" (UniqueName: \"kubernetes.io/projected/4b375e4c-1072-4e6b-be7b-4eea43abf413-kube-api-access-5spwk\") on node \"crc\" DevicePath \"\""
Dec 05 11:33:33 crc kubenswrapper[4728]: I1205 11:33:33.996144 4728 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4b375e4c-1072-4e6b-be7b-4eea43abf413-plugins-conf\") on node \"crc\" DevicePath \"\""
Dec 05 11:33:33 crc kubenswrapper[4728]: I1205 11:33:33.996152 4728 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4b375e4c-1072-4e6b-be7b-4eea43abf413-rabbitmq-plugins\") on node \"crc\" DevicePath \"\""
Dec 05 11:33:33 crc kubenswrapper[4728]: I1205 11:33:33.996171 4728 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" "
Dec 05 11:33:33 crc kubenswrapper[4728]: I1205 11:33:33.996180 4728 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/4b375e4c-1072-4e6b-be7b-4eea43abf413-rabbitmq-tls\") on node \"crc\" DevicePath \"\""
Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.013712 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6mvjw"]
Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.027548 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4b375e4c-1072-4e6b-be7b-4eea43abf413-server-conf" (OuterVolumeSpecName: "server-conf") pod "4b375e4c-1072-4e6b-be7b-4eea43abf413" (UID: "4b375e4c-1072-4e6b-be7b-4eea43abf413"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.060533 4728 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc"
Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.070169 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4b375e4c-1072-4e6b-be7b-4eea43abf413-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "4b375e4c-1072-4e6b-be7b-4eea43abf413" (UID: "4b375e4c-1072-4e6b-be7b-4eea43abf413"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.098213 4728 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\""
Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.098233 4728 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4b375e4c-1072-4e6b-be7b-4eea43abf413-rabbitmq-confd\") on node \"crc\" DevicePath \"\""
Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.098244 4728 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4b375e4c-1072-4e6b-be7b-4eea43abf413-server-conf\") on node \"crc\" DevicePath \"\""
Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.168612 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"]
Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.179952 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"]
Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.188760 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"]
Dec 05 11:33:34 crc kubenswrapper[4728]: E1205 11:33:34.189169 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b375e4c-1072-4e6b-be7b-4eea43abf413" containerName="rabbitmq"
Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.189181 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b375e4c-1072-4e6b-be7b-4eea43abf413" containerName="rabbitmq"
Dec 05 11:33:34 crc kubenswrapper[4728]: E1205 11:33:34.189214 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b375e4c-1072-4e6b-be7b-4eea43abf413" containerName="setup-container"
Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.189220 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b375e4c-1072-4e6b-be7b-4eea43abf413" containerName="setup-container"
Dec 05 11:33:34 crc kubenswrapper[4728]: E1205 11:33:34.189248 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1f0aeac-c794-4c5a-a4ed-1140183164cd" containerName="extract-utilities"
Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.189256 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1f0aeac-c794-4c5a-a4ed-1140183164cd" containerName="extract-utilities"
Dec 05 11:33:34 crc kubenswrapper[4728]: E1205 11:33:34.189268 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1f0aeac-c794-4c5a-a4ed-1140183164cd" containerName="extract-content"
Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.189275 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1f0aeac-c794-4c5a-a4ed-1140183164cd" containerName="extract-content"
Dec 05 11:33:34 crc kubenswrapper[4728]: E1205 11:33:34.189288 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1f0aeac-c794-4c5a-a4ed-1140183164cd" containerName="registry-server"
Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.189293 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1f0aeac-c794-4c5a-a4ed-1140183164cd" containerName="registry-server"
Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.189485 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b375e4c-1072-4e6b-be7b-4eea43abf413" containerName="rabbitmq"
Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.189506 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="c1f0aeac-c794-4c5a-a4ed-1140183164cd" containerName="registry-server"
Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.190649 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.195706 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user"
Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.195758 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc"
Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.195717 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf"
Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.195899 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie"
Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.195991 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf"
Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.197608 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-b7kpg"
Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.197717 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data"
Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.225274 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.303439 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/9f981b7a-7de1-4ce3-ae26-5693c659923d-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"9f981b7a-7de1-4ce3-ae26-5693c659923d\") " pod="openstack/rabbitmq-server-0"
Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.303541 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/9f981b7a-7de1-4ce3-ae26-5693c659923d-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"9f981b7a-7de1-4ce3-ae26-5693c659923d\") " pod="openstack/rabbitmq-server-0"
Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.303633 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rmj8h\" (UniqueName: \"kubernetes.io/projected/9f981b7a-7de1-4ce3-ae26-5693c659923d-kube-api-access-rmj8h\") pod \"rabbitmq-server-0\" (UID: \"9f981b7a-7de1-4ce3-ae26-5693c659923d\") " pod="openstack/rabbitmq-server-0"
Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.303679 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/9f981b7a-7de1-4ce3-ae26-5693c659923d-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"9f981b7a-7de1-4ce3-ae26-5693c659923d\") " pod="openstack/rabbitmq-server-0"
Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.303701 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/9f981b7a-7de1-4ce3-ae26-5693c659923d-server-conf\") pod \"rabbitmq-server-0\" (UID: \"9f981b7a-7de1-4ce3-ae26-5693c659923d\") " pod="openstack/rabbitmq-server-0"
Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.303755 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9f981b7a-7de1-4ce3-ae26-5693c659923d-pod-info\") pod \"rabbitmq-server-0\" (UID: \"9f981b7a-7de1-4ce3-ae26-5693c659923d\") " pod="openstack/rabbitmq-server-0"
Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.303778 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/9f981b7a-7de1-4ce3-ae26-5693c659923d-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"9f981b7a-7de1-4ce3-ae26-5693c659923d\") " pod="openstack/rabbitmq-server-0"
Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.303929 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9f981b7a-7de1-4ce3-ae26-5693c659923d-config-data\") pod \"rabbitmq-server-0\" (UID: \"9f981b7a-7de1-4ce3-ae26-5693c659923d\") " pod="openstack/rabbitmq-server-0"
Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.304003 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-server-0\" (UID: \"9f981b7a-7de1-4ce3-ae26-5693c659923d\") " pod="openstack/rabbitmq-server-0"
Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.304157 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/9f981b7a-7de1-4ce3-ae26-5693c659923d-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"9f981b7a-7de1-4ce3-ae26-5693c659923d\") " pod="openstack/rabbitmq-server-0"
Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.304197 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/9f981b7a-7de1-4ce3-ae26-5693c659923d-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"9f981b7a-7de1-4ce3-ae26-5693c659923d\") " pod="openstack/rabbitmq-server-0"
Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.365908 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4b375e4c-1072-4e6b-be7b-4eea43abf413" path="/var/lib/kubelet/pods/4b375e4c-1072-4e6b-be7b-4eea43abf413/volumes"
Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.405527 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/9f981b7a-7de1-4ce3-ae26-5693c659923d-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"9f981b7a-7de1-4ce3-ae26-5693c659923d\") " pod="openstack/rabbitmq-server-0"
Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.405580 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/9f981b7a-7de1-4ce3-ae26-5693c659923d-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"9f981b7a-7de1-4ce3-ae26-5693c659923d\") " pod="openstack/rabbitmq-server-0"
Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.405619 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rmj8h\" (UniqueName: \"kubernetes.io/projected/9f981b7a-7de1-4ce3-ae26-5693c659923d-kube-api-access-rmj8h\") pod \"rabbitmq-server-0\" (UID: \"9f981b7a-7de1-4ce3-ae26-5693c659923d\") " pod="openstack/rabbitmq-server-0"
Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.405643 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/9f981b7a-7de1-4ce3-ae26-5693c659923d-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"9f981b7a-7de1-4ce3-ae26-5693c659923d\") " pod="openstack/rabbitmq-server-0"
Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.405659 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/9f981b7a-7de1-4ce3-ae26-5693c659923d-server-conf\") pod \"rabbitmq-server-0\" (UID: \"9f981b7a-7de1-4ce3-ae26-5693c659923d\") " pod="openstack/rabbitmq-server-0"
Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.405686 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9f981b7a-7de1-4ce3-ae26-5693c659923d-pod-info\") pod \"rabbitmq-server-0\" (UID: \"9f981b7a-7de1-4ce3-ae26-5693c659923d\") " pod="openstack/rabbitmq-server-0"
Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.405702 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/9f981b7a-7de1-4ce3-ae26-5693c659923d-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"9f981b7a-7de1-4ce3-ae26-5693c659923d\") " pod="openstack/rabbitmq-server-0"
Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.405752 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9f981b7a-7de1-4ce3-ae26-5693c659923d-config-data\") pod \"rabbitmq-server-0\" (UID: \"9f981b7a-7de1-4ce3-ae26-5693c659923d\") " pod="openstack/rabbitmq-server-0"
Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.405777 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-server-0\" (UID: \"9f981b7a-7de1-4ce3-ae26-5693c659923d\") " pod="openstack/rabbitmq-server-0"
Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.405845 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/9f981b7a-7de1-4ce3-ae26-5693c659923d-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"9f981b7a-7de1-4ce3-ae26-5693c659923d\") " pod="openstack/rabbitmq-server-0"
Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.405920 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/9f981b7a-7de1-4ce3-ae26-5693c659923d-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"9f981b7a-7de1-4ce3-ae26-5693c659923d\") " pod="openstack/rabbitmq-server-0"
Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.406351 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/9f981b7a-7de1-4ce3-ae26-5693c659923d-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"9f981b7a-7de1-4ce3-ae26-5693c659923d\") " pod="openstack/rabbitmq-server-0"
Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.408972 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/9f981b7a-7de1-4ce3-ae26-5693c659923d-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"9f981b7a-7de1-4ce3-ae26-5693c659923d\") "
pod="openstack/rabbitmq-server-0" Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.409210 4728 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-server-0\" (UID: \"9f981b7a-7de1-4ce3-ae26-5693c659923d\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/rabbitmq-server-0" Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.410131 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/9f981b7a-7de1-4ce3-ae26-5693c659923d-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"9f981b7a-7de1-4ce3-ae26-5693c659923d\") " pod="openstack/rabbitmq-server-0" Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.410411 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/9f981b7a-7de1-4ce3-ae26-5693c659923d-server-conf\") pod \"rabbitmq-server-0\" (UID: \"9f981b7a-7de1-4ce3-ae26-5693c659923d\") " pod="openstack/rabbitmq-server-0" Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.410901 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9f981b7a-7de1-4ce3-ae26-5693c659923d-config-data\") pod \"rabbitmq-server-0\" (UID: \"9f981b7a-7de1-4ce3-ae26-5693c659923d\") " pod="openstack/rabbitmq-server-0" Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.411595 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/9f981b7a-7de1-4ce3-ae26-5693c659923d-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"9f981b7a-7de1-4ce3-ae26-5693c659923d\") " pod="openstack/rabbitmq-server-0" Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.411602 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9f981b7a-7de1-4ce3-ae26-5693c659923d-pod-info\") pod \"rabbitmq-server-0\" (UID: \"9f981b7a-7de1-4ce3-ae26-5693c659923d\") " pod="openstack/rabbitmq-server-0" Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.417710 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/9f981b7a-7de1-4ce3-ae26-5693c659923d-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"9f981b7a-7de1-4ce3-ae26-5693c659923d\") " pod="openstack/rabbitmq-server-0" Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.422160 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/9f981b7a-7de1-4ce3-ae26-5693c659923d-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"9f981b7a-7de1-4ce3-ae26-5693c659923d\") " pod="openstack/rabbitmq-server-0" Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.425047 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rmj8h\" (UniqueName: \"kubernetes.io/projected/9f981b7a-7de1-4ce3-ae26-5693c659923d-kube-api-access-rmj8h\") pod \"rabbitmq-server-0\" (UID: \"9f981b7a-7de1-4ce3-ae26-5693c659923d\") " pod="openstack/rabbitmq-server-0" Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.448787 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-server-0\" 
(UID: \"9f981b7a-7de1-4ce3-ae26-5693c659923d\") " pod="openstack/rabbitmq-server-0" Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.525286 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.527613 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.609530 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-plugins-conf\") pod \"1ffe010f-366a-454a-a8a3-639ed1cf0fdc\" (UID: \"1ffe010f-366a-454a-a8a3-639ed1cf0fdc\") " Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.610610 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-erlang-cookie-secret\") pod \"1ffe010f-366a-454a-a8a3-639ed1cf0fdc\" (UID: \"1ffe010f-366a-454a-a8a3-639ed1cf0fdc\") " Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.610766 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-pod-info\") pod \"1ffe010f-366a-454a-a8a3-639ed1cf0fdc\" (UID: \"1ffe010f-366a-454a-a8a3-639ed1cf0fdc\") " Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.610921 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"1ffe010f-366a-454a-a8a3-639ed1cf0fdc\" (UID: \"1ffe010f-366a-454a-a8a3-639ed1cf0fdc\") " Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.611049 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-rabbitmq-tls\") pod \"1ffe010f-366a-454a-a8a3-639ed1cf0fdc\" (UID: \"1ffe010f-366a-454a-a8a3-639ed1cf0fdc\") " Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.611161 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-rabbitmq-plugins\") pod \"1ffe010f-366a-454a-a8a3-639ed1cf0fdc\" (UID: \"1ffe010f-366a-454a-a8a3-639ed1cf0fdc\") " Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.611266 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-rabbitmq-erlang-cookie\") pod \"1ffe010f-366a-454a-a8a3-639ed1cf0fdc\" (UID: \"1ffe010f-366a-454a-a8a3-639ed1cf0fdc\") " Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.611378 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-rabbitmq-confd\") pod \"1ffe010f-366a-454a-a8a3-639ed1cf0fdc\" (UID: \"1ffe010f-366a-454a-a8a3-639ed1cf0fdc\") " Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.611489 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htbpw\" (UniqueName: \"kubernetes.io/projected/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-kube-api-access-htbpw\") pod 
\"1ffe010f-366a-454a-a8a3-639ed1cf0fdc\" (UID: \"1ffe010f-366a-454a-a8a3-639ed1cf0fdc\") " Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.611693 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-config-data\") pod \"1ffe010f-366a-454a-a8a3-639ed1cf0fdc\" (UID: \"1ffe010f-366a-454a-a8a3-639ed1cf0fdc\") " Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.611814 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-server-conf\") pod \"1ffe010f-366a-454a-a8a3-639ed1cf0fdc\" (UID: \"1ffe010f-366a-454a-a8a3-639ed1cf0fdc\") " Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.617540 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "1ffe010f-366a-454a-a8a3-639ed1cf0fdc" (UID: "1ffe010f-366a-454a-a8a3-639ed1cf0fdc"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.630265 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-pod-info" (OuterVolumeSpecName: "pod-info") pod "1ffe010f-366a-454a-a8a3-639ed1cf0fdc" (UID: "1ffe010f-366a-454a-a8a3-639ed1cf0fdc"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.630923 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "1ffe010f-366a-454a-a8a3-639ed1cf0fdc" (UID: "1ffe010f-366a-454a-a8a3-639ed1cf0fdc"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.634435 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "1ffe010f-366a-454a-a8a3-639ed1cf0fdc" (UID: "1ffe010f-366a-454a-a8a3-639ed1cf0fdc"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.634696 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "1ffe010f-366a-454a-a8a3-639ed1cf0fdc" (UID: "1ffe010f-366a-454a-a8a3-639ed1cf0fdc"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.636076 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "1ffe010f-366a-454a-a8a3-639ed1cf0fdc" (UID: "1ffe010f-366a-454a-a8a3-639ed1cf0fdc"). InnerVolumeSpecName "rabbitmq-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.658020 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "persistence") pod "1ffe010f-366a-454a-a8a3-639ed1cf0fdc" (UID: "1ffe010f-366a-454a-a8a3-639ed1cf0fdc"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.688729 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-kube-api-access-htbpw" (OuterVolumeSpecName: "kube-api-access-htbpw") pod "1ffe010f-366a-454a-a8a3-639ed1cf0fdc" (UID: "1ffe010f-366a-454a-a8a3-639ed1cf0fdc"). InnerVolumeSpecName "kube-api-access-htbpw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.717125 4728 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.717153 4728 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.717162 4728 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-pod-info\") on node \"crc\" DevicePath \"\"" Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.717183 4728 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.717191 4728 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.717200 4728 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.717209 4728 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.717218 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htbpw\" (UniqueName: \"kubernetes.io/projected/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-kube-api-access-htbpw\") on node \"crc\" DevicePath \"\"" Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.725473 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-config-data" (OuterVolumeSpecName: "config-data") pod "1ffe010f-366a-454a-a8a3-639ed1cf0fdc" (UID: "1ffe010f-366a-454a-a8a3-639ed1cf0fdc"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.726139 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-server-conf" (OuterVolumeSpecName: "server-conf") pod "1ffe010f-366a-454a-a8a3-639ed1cf0fdc" (UID: "1ffe010f-366a-454a-a8a3-639ed1cf0fdc"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.758900 4728 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.820216 4728 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.820255 4728 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.820267 4728 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-server-conf\") on node \"crc\" DevicePath \"\"" Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.862247 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.863266 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"1ffe010f-366a-454a-a8a3-639ed1cf0fdc","Type":"ContainerDied","Data":"8696f921f7cda96413571926919ec77e1d181e158bf83564c780b49fbebb8859"} Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.863330 4728 scope.go:117] "RemoveContainer" containerID="8696f921f7cda96413571926919ec77e1d181e158bf83564c780b49fbebb8859" Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.866012 4728 generic.go:334] "Generic (PLEG): container finished" podID="1ffe010f-366a-454a-a8a3-639ed1cf0fdc" containerID="8696f921f7cda96413571926919ec77e1d181e158bf83564c780b49fbebb8859" exitCode=0 Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.866360 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"1ffe010f-366a-454a-a8a3-639ed1cf0fdc","Type":"ContainerDied","Data":"d9248dc63dfb7a8ae1da0eb7e96dd48756327c14299c126c63e96cf07952e471"} Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.909465 4728 scope.go:117] "RemoveContainer" containerID="d1fe7cce520a9bd7d7211a892a3d6e4444e5b7752c415881b7288fb3265ef5cf" Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.923118 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "1ffe010f-366a-454a-a8a3-639ed1cf0fdc" (UID: "1ffe010f-366a-454a-a8a3-639ed1cf0fdc"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.931428 4728 scope.go:117] "RemoveContainer" containerID="8696f921f7cda96413571926919ec77e1d181e158bf83564c780b49fbebb8859" Dec 05 11:33:34 crc kubenswrapper[4728]: E1205 11:33:34.931738 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8696f921f7cda96413571926919ec77e1d181e158bf83564c780b49fbebb8859\": container with ID starting with 8696f921f7cda96413571926919ec77e1d181e158bf83564c780b49fbebb8859 not found: ID does not exist" containerID="8696f921f7cda96413571926919ec77e1d181e158bf83564c780b49fbebb8859" Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.931843 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8696f921f7cda96413571926919ec77e1d181e158bf83564c780b49fbebb8859"} err="failed to get container status \"8696f921f7cda96413571926919ec77e1d181e158bf83564c780b49fbebb8859\": rpc error: code = NotFound desc = could not find container \"8696f921f7cda96413571926919ec77e1d181e158bf83564c780b49fbebb8859\": container with ID starting with 8696f921f7cda96413571926919ec77e1d181e158bf83564c780b49fbebb8859 not found: ID does not exist" Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.931953 4728 scope.go:117] "RemoveContainer" containerID="d1fe7cce520a9bd7d7211a892a3d6e4444e5b7752c415881b7288fb3265ef5cf" Dec 05 11:33:34 crc kubenswrapper[4728]: E1205 11:33:34.937714 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d1fe7cce520a9bd7d7211a892a3d6e4444e5b7752c415881b7288fb3265ef5cf\": container with ID starting with d1fe7cce520a9bd7d7211a892a3d6e4444e5b7752c415881b7288fb3265ef5cf not found: ID does not exist" containerID="d1fe7cce520a9bd7d7211a892a3d6e4444e5b7752c415881b7288fb3265ef5cf" Dec 05 11:33:34 crc kubenswrapper[4728]: I1205 11:33:34.937975 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d1fe7cce520a9bd7d7211a892a3d6e4444e5b7752c415881b7288fb3265ef5cf"} err="failed to get container status \"d1fe7cce520a9bd7d7211a892a3d6e4444e5b7752c415881b7288fb3265ef5cf\": rpc error: code = NotFound desc = could not find container \"d1fe7cce520a9bd7d7211a892a3d6e4444e5b7752c415881b7288fb3265ef5cf\": container with ID starting with d1fe7cce520a9bd7d7211a892a3d6e4444e5b7752c415881b7288fb3265ef5cf not found: ID does not exist" Dec 05 11:33:35 crc kubenswrapper[4728]: I1205 11:33:35.023921 4728 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1ffe010f-366a-454a-a8a3-639ed1cf0fdc-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 05 11:33:35 crc kubenswrapper[4728]: I1205 11:33:35.161993 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 11:33:35 crc kubenswrapper[4728]: I1205 11:33:35.205553 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 11:33:35 crc kubenswrapper[4728]: I1205 11:33:35.214354 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 11:33:35 crc kubenswrapper[4728]: I1205 11:33:35.240587 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 11:33:35 crc kubenswrapper[4728]: E1205 11:33:35.241075 4728 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="1ffe010f-366a-454a-a8a3-639ed1cf0fdc" containerName="rabbitmq" Dec 05 11:33:35 crc kubenswrapper[4728]: I1205 11:33:35.241097 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ffe010f-366a-454a-a8a3-639ed1cf0fdc" containerName="rabbitmq" Dec 05 11:33:35 crc kubenswrapper[4728]: E1205 11:33:35.241122 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ffe010f-366a-454a-a8a3-639ed1cf0fdc" containerName="setup-container" Dec 05 11:33:35 crc kubenswrapper[4728]: I1205 11:33:35.241130 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ffe010f-366a-454a-a8a3-639ed1cf0fdc" containerName="setup-container" Dec 05 11:33:35 crc kubenswrapper[4728]: I1205 11:33:35.241377 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ffe010f-366a-454a-a8a3-639ed1cf0fdc" containerName="rabbitmq" Dec 05 11:33:35 crc kubenswrapper[4728]: I1205 11:33:35.242715 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:33:35 crc kubenswrapper[4728]: I1205 11:33:35.248553 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 05 11:33:35 crc kubenswrapper[4728]: I1205 11:33:35.248923 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 05 11:33:35 crc kubenswrapper[4728]: I1205 11:33:35.248946 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 05 11:33:35 crc kubenswrapper[4728]: I1205 11:33:35.249047 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 05 11:33:35 crc kubenswrapper[4728]: I1205 11:33:35.249074 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-dgpzv" Dec 05 11:33:35 crc kubenswrapper[4728]: I1205 11:33:35.249084 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 05 11:33:35 crc kubenswrapper[4728]: I1205 11:33:35.249117 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 05 11:33:35 crc kubenswrapper[4728]: I1205 11:33:35.269850 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 11:33:35 crc kubenswrapper[4728]: I1205 11:33:35.329986 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/297a0136-c048-4d32-ae75-2691e2bb98b4-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"297a0136-c048-4d32-ae75-2691e2bb98b4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:33:35 crc kubenswrapper[4728]: I1205 11:33:35.330076 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/297a0136-c048-4d32-ae75-2691e2bb98b4-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"297a0136-c048-4d32-ae75-2691e2bb98b4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:33:35 crc kubenswrapper[4728]: I1205 11:33:35.330172 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/297a0136-c048-4d32-ae75-2691e2bb98b4-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"297a0136-c048-4d32-ae75-2691e2bb98b4\") " 
pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:33:35 crc kubenswrapper[4728]: I1205 11:33:35.330219 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/297a0136-c048-4d32-ae75-2691e2bb98b4-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"297a0136-c048-4d32-ae75-2691e2bb98b4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:33:35 crc kubenswrapper[4728]: I1205 11:33:35.330280 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"297a0136-c048-4d32-ae75-2691e2bb98b4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:33:35 crc kubenswrapper[4728]: I1205 11:33:35.330316 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/297a0136-c048-4d32-ae75-2691e2bb98b4-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"297a0136-c048-4d32-ae75-2691e2bb98b4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:33:35 crc kubenswrapper[4728]: I1205 11:33:35.330352 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/297a0136-c048-4d32-ae75-2691e2bb98b4-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"297a0136-c048-4d32-ae75-2691e2bb98b4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:33:35 crc kubenswrapper[4728]: I1205 11:33:35.330379 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/297a0136-c048-4d32-ae75-2691e2bb98b4-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"297a0136-c048-4d32-ae75-2691e2bb98b4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:33:35 crc kubenswrapper[4728]: I1205 11:33:35.330420 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/297a0136-c048-4d32-ae75-2691e2bb98b4-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"297a0136-c048-4d32-ae75-2691e2bb98b4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:33:35 crc kubenswrapper[4728]: I1205 11:33:35.330468 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/297a0136-c048-4d32-ae75-2691e2bb98b4-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"297a0136-c048-4d32-ae75-2691e2bb98b4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:33:35 crc kubenswrapper[4728]: I1205 11:33:35.330496 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xtz5h\" (UniqueName: \"kubernetes.io/projected/297a0136-c048-4d32-ae75-2691e2bb98b4-kube-api-access-xtz5h\") pod \"rabbitmq-cell1-server-0\" (UID: \"297a0136-c048-4d32-ae75-2691e2bb98b4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:33:35 crc kubenswrapper[4728]: I1205 11:33:35.432267 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/297a0136-c048-4d32-ae75-2691e2bb98b4-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"297a0136-c048-4d32-ae75-2691e2bb98b4\") " 
pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:33:35 crc kubenswrapper[4728]: I1205 11:33:35.432317 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/297a0136-c048-4d32-ae75-2691e2bb98b4-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"297a0136-c048-4d32-ae75-2691e2bb98b4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:33:35 crc kubenswrapper[4728]: I1205 11:33:35.432356 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"297a0136-c048-4d32-ae75-2691e2bb98b4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:33:35 crc kubenswrapper[4728]: I1205 11:33:35.432406 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/297a0136-c048-4d32-ae75-2691e2bb98b4-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"297a0136-c048-4d32-ae75-2691e2bb98b4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:33:35 crc kubenswrapper[4728]: I1205 11:33:35.432452 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/297a0136-c048-4d32-ae75-2691e2bb98b4-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"297a0136-c048-4d32-ae75-2691e2bb98b4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:33:35 crc kubenswrapper[4728]: I1205 11:33:35.432477 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/297a0136-c048-4d32-ae75-2691e2bb98b4-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"297a0136-c048-4d32-ae75-2691e2bb98b4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:33:35 crc kubenswrapper[4728]: I1205 11:33:35.432497 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/297a0136-c048-4d32-ae75-2691e2bb98b4-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"297a0136-c048-4d32-ae75-2691e2bb98b4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:33:35 crc kubenswrapper[4728]: I1205 11:33:35.432551 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/297a0136-c048-4d32-ae75-2691e2bb98b4-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"297a0136-c048-4d32-ae75-2691e2bb98b4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:33:35 crc kubenswrapper[4728]: I1205 11:33:35.432574 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xtz5h\" (UniqueName: \"kubernetes.io/projected/297a0136-c048-4d32-ae75-2691e2bb98b4-kube-api-access-xtz5h\") pod \"rabbitmq-cell1-server-0\" (UID: \"297a0136-c048-4d32-ae75-2691e2bb98b4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:33:35 crc kubenswrapper[4728]: I1205 11:33:35.432595 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/297a0136-c048-4d32-ae75-2691e2bb98b4-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"297a0136-c048-4d32-ae75-2691e2bb98b4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:33:35 crc kubenswrapper[4728]: I1205 11:33:35.432636 4728 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/297a0136-c048-4d32-ae75-2691e2bb98b4-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"297a0136-c048-4d32-ae75-2691e2bb98b4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:33:35 crc kubenswrapper[4728]: I1205 11:33:35.432756 4728 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"297a0136-c048-4d32-ae75-2691e2bb98b4\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:33:35 crc kubenswrapper[4728]: I1205 11:33:35.433044 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/297a0136-c048-4d32-ae75-2691e2bb98b4-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"297a0136-c048-4d32-ae75-2691e2bb98b4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:33:35 crc kubenswrapper[4728]: I1205 11:33:35.434305 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/297a0136-c048-4d32-ae75-2691e2bb98b4-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"297a0136-c048-4d32-ae75-2691e2bb98b4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:33:35 crc kubenswrapper[4728]: I1205 11:33:35.434496 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/297a0136-c048-4d32-ae75-2691e2bb98b4-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"297a0136-c048-4d32-ae75-2691e2bb98b4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:33:35 crc kubenswrapper[4728]: I1205 11:33:35.435052 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/297a0136-c048-4d32-ae75-2691e2bb98b4-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"297a0136-c048-4d32-ae75-2691e2bb98b4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:33:35 crc kubenswrapper[4728]: I1205 11:33:35.438136 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/297a0136-c048-4d32-ae75-2691e2bb98b4-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"297a0136-c048-4d32-ae75-2691e2bb98b4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:33:35 crc kubenswrapper[4728]: I1205 11:33:35.438368 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/297a0136-c048-4d32-ae75-2691e2bb98b4-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"297a0136-c048-4d32-ae75-2691e2bb98b4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:33:35 crc kubenswrapper[4728]: I1205 11:33:35.441403 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/297a0136-c048-4d32-ae75-2691e2bb98b4-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"297a0136-c048-4d32-ae75-2691e2bb98b4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:33:35 crc kubenswrapper[4728]: I1205 11:33:35.441775 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/297a0136-c048-4d32-ae75-2691e2bb98b4-pod-info\") pod 
\"rabbitmq-cell1-server-0\" (UID: \"297a0136-c048-4d32-ae75-2691e2bb98b4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:33:35 crc kubenswrapper[4728]: I1205 11:33:35.442285 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/297a0136-c048-4d32-ae75-2691e2bb98b4-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"297a0136-c048-4d32-ae75-2691e2bb98b4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:33:35 crc kubenswrapper[4728]: I1205 11:33:35.452082 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xtz5h\" (UniqueName: \"kubernetes.io/projected/297a0136-c048-4d32-ae75-2691e2bb98b4-kube-api-access-xtz5h\") pod \"rabbitmq-cell1-server-0\" (UID: \"297a0136-c048-4d32-ae75-2691e2bb98b4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:33:35 crc kubenswrapper[4728]: I1205 11:33:35.488434 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"297a0136-c048-4d32-ae75-2691e2bb98b4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:33:35 crc kubenswrapper[4728]: I1205 11:33:35.578906 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:33:35 crc kubenswrapper[4728]: I1205 11:33:35.879867 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"9f981b7a-7de1-4ce3-ae26-5693c659923d","Type":"ContainerStarted","Data":"847195b6343a797274068429ac6cdecfe2e6ea3e12f948f3017b85039b154155"} Dec 05 11:33:35 crc kubenswrapper[4728]: I1205 11:33:35.881501 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-6mvjw" podUID="e89cf8d0-efb6-4c10-8ffd-061fc03079be" containerName="registry-server" containerID="cri-o://32c10a59f60d3b7df7ab15975b0c712648987bd38a8e006a287689499536aec6" gracePeriod=2 Dec 05 11:33:36 crc kubenswrapper[4728]: I1205 11:33:36.121244 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 11:33:36 crc kubenswrapper[4728]: I1205 11:33:36.363432 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1ffe010f-366a-454a-a8a3-639ed1cf0fdc" path="/var/lib/kubelet/pods/1ffe010f-366a-454a-a8a3-639ed1cf0fdc/volumes" Dec 05 11:33:36 crc kubenswrapper[4728]: I1205 11:33:36.705207 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6mvjw" Dec 05 11:33:36 crc kubenswrapper[4728]: I1205 11:33:36.763529 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e89cf8d0-efb6-4c10-8ffd-061fc03079be-utilities\") pod \"e89cf8d0-efb6-4c10-8ffd-061fc03079be\" (UID: \"e89cf8d0-efb6-4c10-8ffd-061fc03079be\") " Dec 05 11:33:36 crc kubenswrapper[4728]: I1205 11:33:36.763652 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fjlkm\" (UniqueName: \"kubernetes.io/projected/e89cf8d0-efb6-4c10-8ffd-061fc03079be-kube-api-access-fjlkm\") pod \"e89cf8d0-efb6-4c10-8ffd-061fc03079be\" (UID: \"e89cf8d0-efb6-4c10-8ffd-061fc03079be\") " Dec 05 11:33:36 crc kubenswrapper[4728]: I1205 11:33:36.763834 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e89cf8d0-efb6-4c10-8ffd-061fc03079be-catalog-content\") pod \"e89cf8d0-efb6-4c10-8ffd-061fc03079be\" (UID: \"e89cf8d0-efb6-4c10-8ffd-061fc03079be\") " Dec 05 11:33:36 crc kubenswrapper[4728]: I1205 11:33:36.765261 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e89cf8d0-efb6-4c10-8ffd-061fc03079be-utilities" (OuterVolumeSpecName: "utilities") pod "e89cf8d0-efb6-4c10-8ffd-061fc03079be" (UID: "e89cf8d0-efb6-4c10-8ffd-061fc03079be"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:33:36 crc kubenswrapper[4728]: I1205 11:33:36.772195 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e89cf8d0-efb6-4c10-8ffd-061fc03079be-kube-api-access-fjlkm" (OuterVolumeSpecName: "kube-api-access-fjlkm") pod "e89cf8d0-efb6-4c10-8ffd-061fc03079be" (UID: "e89cf8d0-efb6-4c10-8ffd-061fc03079be"). InnerVolumeSpecName "kube-api-access-fjlkm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:33:36 crc kubenswrapper[4728]: I1205 11:33:36.835656 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5559d4f67f-7dsd9"] Dec 05 11:33:36 crc kubenswrapper[4728]: E1205 11:33:36.836303 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e89cf8d0-efb6-4c10-8ffd-061fc03079be" containerName="extract-content" Dec 05 11:33:36 crc kubenswrapper[4728]: I1205 11:33:36.836324 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="e89cf8d0-efb6-4c10-8ffd-061fc03079be" containerName="extract-content" Dec 05 11:33:36 crc kubenswrapper[4728]: E1205 11:33:36.836356 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e89cf8d0-efb6-4c10-8ffd-061fc03079be" containerName="extract-utilities" Dec 05 11:33:36 crc kubenswrapper[4728]: I1205 11:33:36.836366 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="e89cf8d0-efb6-4c10-8ffd-061fc03079be" containerName="extract-utilities" Dec 05 11:33:36 crc kubenswrapper[4728]: E1205 11:33:36.836382 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e89cf8d0-efb6-4c10-8ffd-061fc03079be" containerName="registry-server" Dec 05 11:33:36 crc kubenswrapper[4728]: I1205 11:33:36.836389 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="e89cf8d0-efb6-4c10-8ffd-061fc03079be" containerName="registry-server" Dec 05 11:33:36 crc kubenswrapper[4728]: I1205 11:33:36.836632 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="e89cf8d0-efb6-4c10-8ffd-061fc03079be" containerName="registry-server" Dec 05 11:33:36 crc kubenswrapper[4728]: I1205 11:33:36.837893 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5559d4f67f-7dsd9" Dec 05 11:33:36 crc kubenswrapper[4728]: I1205 11:33:36.849388 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Dec 05 11:33:36 crc kubenswrapper[4728]: I1205 11:33:36.853584 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5559d4f67f-7dsd9"] Dec 05 11:33:36 crc kubenswrapper[4728]: I1205 11:33:36.867424 4728 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e89cf8d0-efb6-4c10-8ffd-061fc03079be-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 11:33:36 crc kubenswrapper[4728]: I1205 11:33:36.867449 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fjlkm\" (UniqueName: \"kubernetes.io/projected/e89cf8d0-efb6-4c10-8ffd-061fc03079be-kube-api-access-fjlkm\") on node \"crc\" DevicePath \"\"" Dec 05 11:33:36 crc kubenswrapper[4728]: I1205 11:33:36.879034 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e89cf8d0-efb6-4c10-8ffd-061fc03079be-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e89cf8d0-efb6-4c10-8ffd-061fc03079be" (UID: "e89cf8d0-efb6-4c10-8ffd-061fc03079be"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:33:36 crc kubenswrapper[4728]: I1205 11:33:36.892898 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"9f981b7a-7de1-4ce3-ae26-5693c659923d","Type":"ContainerStarted","Data":"22a275ee086a57d83d14e02d3b9f0b46297e03690cc7d1ed65246a7eded263e1"} Dec 05 11:33:36 crc kubenswrapper[4728]: I1205 11:33:36.896404 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"297a0136-c048-4d32-ae75-2691e2bb98b4","Type":"ContainerStarted","Data":"bbfd543ef8ae1a889e4a3bddf42db031f34c4c0cc0002749bd35827d30f029d4"} Dec 05 11:33:36 crc kubenswrapper[4728]: I1205 11:33:36.898631 4728 generic.go:334] "Generic (PLEG): container finished" podID="e89cf8d0-efb6-4c10-8ffd-061fc03079be" containerID="32c10a59f60d3b7df7ab15975b0c712648987bd38a8e006a287689499536aec6" exitCode=0 Dec 05 11:33:36 crc kubenswrapper[4728]: I1205 11:33:36.898669 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6mvjw" event={"ID":"e89cf8d0-efb6-4c10-8ffd-061fc03079be","Type":"ContainerDied","Data":"32c10a59f60d3b7df7ab15975b0c712648987bd38a8e006a287689499536aec6"} Dec 05 11:33:36 crc kubenswrapper[4728]: I1205 11:33:36.898691 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6mvjw" event={"ID":"e89cf8d0-efb6-4c10-8ffd-061fc03079be","Type":"ContainerDied","Data":"1b95da1c095f40795089c9c6cd1de5dddd2a1bcc67c2dfb03b2cbe3203c93681"} Dec 05 11:33:36 crc kubenswrapper[4728]: I1205 11:33:36.898707 4728 scope.go:117] "RemoveContainer" containerID="32c10a59f60d3b7df7ab15975b0c712648987bd38a8e006a287689499536aec6" Dec 05 11:33:36 crc kubenswrapper[4728]: I1205 11:33:36.898781 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6mvjw"
Dec 05 11:33:36 crc kubenswrapper[4728]: I1205 11:33:36.929132 4728 scope.go:117] "RemoveContainer" containerID="fc66a672c69d56fce435073ed72bd768b6aabf9cea2ba6a323adcd58db4428b4"
Dec 05 11:33:36 crc kubenswrapper[4728]: I1205 11:33:36.961377 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6mvjw"]
Dec 05 11:33:36 crc kubenswrapper[4728]: I1205 11:33:36.965019 4728 scope.go:117] "RemoveContainer" containerID="b261c902bd128e626e60e24dee865ac8ca524c8863ffcc85b48396535e95f522"
Dec 05 11:33:36 crc kubenswrapper[4728]: I1205 11:33:36.968990 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/90401a32-04f9-4809-b979-e113808359be-ovsdbserver-nb\") pod \"dnsmasq-dns-5559d4f67f-7dsd9\" (UID: \"90401a32-04f9-4809-b979-e113808359be\") " pod="openstack/dnsmasq-dns-5559d4f67f-7dsd9"
Dec 05 11:33:36 crc kubenswrapper[4728]: I1205 11:33:36.991694 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/90401a32-04f9-4809-b979-e113808359be-dns-svc\") pod \"dnsmasq-dns-5559d4f67f-7dsd9\" (UID: \"90401a32-04f9-4809-b979-e113808359be\") " pod="openstack/dnsmasq-dns-5559d4f67f-7dsd9"
Dec 05 11:33:36 crc kubenswrapper[4728]: I1205 11:33:36.991749 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/90401a32-04f9-4809-b979-e113808359be-dns-swift-storage-0\") pod \"dnsmasq-dns-5559d4f67f-7dsd9\" (UID: \"90401a32-04f9-4809-b979-e113808359be\") " pod="openstack/dnsmasq-dns-5559d4f67f-7dsd9"
Dec 05 11:33:36 crc kubenswrapper[4728]: I1205 11:33:36.991857 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5zft9\" (UniqueName: \"kubernetes.io/projected/90401a32-04f9-4809-b979-e113808359be-kube-api-access-5zft9\") pod \"dnsmasq-dns-5559d4f67f-7dsd9\" (UID: \"90401a32-04f9-4809-b979-e113808359be\") " pod="openstack/dnsmasq-dns-5559d4f67f-7dsd9"
Dec 05 11:33:36 crc kubenswrapper[4728]: I1205 11:33:36.991980 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/90401a32-04f9-4809-b979-e113808359be-config\") pod \"dnsmasq-dns-5559d4f67f-7dsd9\" (UID: \"90401a32-04f9-4809-b979-e113808359be\") " pod="openstack/dnsmasq-dns-5559d4f67f-7dsd9"
Dec 05 11:33:36 crc kubenswrapper[4728]: I1205 11:33:36.992073 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/90401a32-04f9-4809-b979-e113808359be-openstack-edpm-ipam\") pod \"dnsmasq-dns-5559d4f67f-7dsd9\" (UID: \"90401a32-04f9-4809-b979-e113808359be\") " pod="openstack/dnsmasq-dns-5559d4f67f-7dsd9"
Dec 05 11:33:36 crc kubenswrapper[4728]: I1205 11:33:36.992511 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/90401a32-04f9-4809-b979-e113808359be-ovsdbserver-sb\") pod \"dnsmasq-dns-5559d4f67f-7dsd9\" (UID: \"90401a32-04f9-4809-b979-e113808359be\") " pod="openstack/dnsmasq-dns-5559d4f67f-7dsd9"
Dec 05 11:33:36 crc kubenswrapper[4728]: I1205 11:33:36.992815 4728 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e89cf8d0-efb6-4c10-8ffd-061fc03079be-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 11:33:36 crc kubenswrapper[4728]: I1205 11:33:36.995275 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-6mvjw"]
Dec 05 11:33:37 crc kubenswrapper[4728]: I1205 11:33:37.002776 4728 scope.go:117] "RemoveContainer" containerID="32c10a59f60d3b7df7ab15975b0c712648987bd38a8e006a287689499536aec6"
Dec 05 11:33:37 crc kubenswrapper[4728]: E1205 11:33:37.003245 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"32c10a59f60d3b7df7ab15975b0c712648987bd38a8e006a287689499536aec6\": container with ID starting with 32c10a59f60d3b7df7ab15975b0c712648987bd38a8e006a287689499536aec6 not found: ID does not exist" containerID="32c10a59f60d3b7df7ab15975b0c712648987bd38a8e006a287689499536aec6"
Dec 05 11:33:37 crc kubenswrapper[4728]: I1205 11:33:37.003272 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"32c10a59f60d3b7df7ab15975b0c712648987bd38a8e006a287689499536aec6"} err="failed to get container status \"32c10a59f60d3b7df7ab15975b0c712648987bd38a8e006a287689499536aec6\": rpc error: code = NotFound desc = could not find container \"32c10a59f60d3b7df7ab15975b0c712648987bd38a8e006a287689499536aec6\": container with ID starting with 32c10a59f60d3b7df7ab15975b0c712648987bd38a8e006a287689499536aec6 not found: ID does not exist"
Dec 05 11:33:37 crc kubenswrapper[4728]: I1205 11:33:37.003295 4728 scope.go:117] "RemoveContainer" containerID="fc66a672c69d56fce435073ed72bd768b6aabf9cea2ba6a323adcd58db4428b4"
Dec 05 11:33:37 crc kubenswrapper[4728]: E1205 11:33:37.003666 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fc66a672c69d56fce435073ed72bd768b6aabf9cea2ba6a323adcd58db4428b4\": container with ID starting with fc66a672c69d56fce435073ed72bd768b6aabf9cea2ba6a323adcd58db4428b4 not found: ID does not exist" containerID="fc66a672c69d56fce435073ed72bd768b6aabf9cea2ba6a323adcd58db4428b4"
Dec 05 11:33:37 crc kubenswrapper[4728]: I1205 11:33:37.003759 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fc66a672c69d56fce435073ed72bd768b6aabf9cea2ba6a323adcd58db4428b4"} err="failed to get container status \"fc66a672c69d56fce435073ed72bd768b6aabf9cea2ba6a323adcd58db4428b4\": rpc error: code = NotFound desc = could not find container \"fc66a672c69d56fce435073ed72bd768b6aabf9cea2ba6a323adcd58db4428b4\": container with ID starting with fc66a672c69d56fce435073ed72bd768b6aabf9cea2ba6a323adcd58db4428b4 not found: ID does not exist"
Dec 05 11:33:37 crc kubenswrapper[4728]: I1205 11:33:37.003848 4728 scope.go:117] "RemoveContainer" containerID="b261c902bd128e626e60e24dee865ac8ca524c8863ffcc85b48396535e95f522"
Dec 05 11:33:37 crc kubenswrapper[4728]: E1205 11:33:37.004212 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b261c902bd128e626e60e24dee865ac8ca524c8863ffcc85b48396535e95f522\": container with ID starting with b261c902bd128e626e60e24dee865ac8ca524c8863ffcc85b48396535e95f522 not found: ID does not exist" containerID="b261c902bd128e626e60e24dee865ac8ca524c8863ffcc85b48396535e95f522"
Dec 05 11:33:37 crc kubenswrapper[4728]: I1205 11:33:37.004234 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b261c902bd128e626e60e24dee865ac8ca524c8863ffcc85b48396535e95f522"} err="failed to get container status \"b261c902bd128e626e60e24dee865ac8ca524c8863ffcc85b48396535e95f522\": rpc error: code = NotFound desc = could not find container \"b261c902bd128e626e60e24dee865ac8ca524c8863ffcc85b48396535e95f522\": container with ID starting with b261c902bd128e626e60e24dee865ac8ca524c8863ffcc85b48396535e95f522 not found: ID does not exist"
Dec 05 11:33:37 crc kubenswrapper[4728]: I1205 11:33:37.094851 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/90401a32-04f9-4809-b979-e113808359be-ovsdbserver-nb\") pod \"dnsmasq-dns-5559d4f67f-7dsd9\" (UID: \"90401a32-04f9-4809-b979-e113808359be\") " pod="openstack/dnsmasq-dns-5559d4f67f-7dsd9"
Dec 05 11:33:37 crc kubenswrapper[4728]: I1205 11:33:37.094992 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/90401a32-04f9-4809-b979-e113808359be-dns-swift-storage-0\") pod \"dnsmasq-dns-5559d4f67f-7dsd9\" (UID: \"90401a32-04f9-4809-b979-e113808359be\") " pod="openstack/dnsmasq-dns-5559d4f67f-7dsd9"
Dec 05 11:33:37 crc kubenswrapper[4728]: I1205 11:33:37.095019 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/90401a32-04f9-4809-b979-e113808359be-dns-svc\") pod \"dnsmasq-dns-5559d4f67f-7dsd9\" (UID: \"90401a32-04f9-4809-b979-e113808359be\") " pod="openstack/dnsmasq-dns-5559d4f67f-7dsd9"
Dec 05 11:33:37 crc kubenswrapper[4728]: I1205 11:33:37.095071 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5zft9\" (UniqueName: \"kubernetes.io/projected/90401a32-04f9-4809-b979-e113808359be-kube-api-access-5zft9\") pod \"dnsmasq-dns-5559d4f67f-7dsd9\" (UID: \"90401a32-04f9-4809-b979-e113808359be\") " pod="openstack/dnsmasq-dns-5559d4f67f-7dsd9"
Dec 05 11:33:37 crc kubenswrapper[4728]: I1205 11:33:37.095124 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/90401a32-04f9-4809-b979-e113808359be-config\") pod \"dnsmasq-dns-5559d4f67f-7dsd9\" (UID: \"90401a32-04f9-4809-b979-e113808359be\") " pod="openstack/dnsmasq-dns-5559d4f67f-7dsd9"
Dec 05 11:33:37 crc kubenswrapper[4728]: I1205 11:33:37.095166 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/90401a32-04f9-4809-b979-e113808359be-openstack-edpm-ipam\") pod \"dnsmasq-dns-5559d4f67f-7dsd9\" (UID: \"90401a32-04f9-4809-b979-e113808359be\") " pod="openstack/dnsmasq-dns-5559d4f67f-7dsd9"
Dec 05 11:33:37 crc kubenswrapper[4728]: I1205 11:33:37.095207 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/90401a32-04f9-4809-b979-e113808359be-ovsdbserver-sb\") pod \"dnsmasq-dns-5559d4f67f-7dsd9\" (UID: \"90401a32-04f9-4809-b979-e113808359be\") " pod="openstack/dnsmasq-dns-5559d4f67f-7dsd9"
Dec 05 11:33:37 crc kubenswrapper[4728]: I1205 11:33:37.095912 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/90401a32-04f9-4809-b979-e113808359be-ovsdbserver-nb\") pod \"dnsmasq-dns-5559d4f67f-7dsd9\" (UID: \"90401a32-04f9-4809-b979-e113808359be\") " pod="openstack/dnsmasq-dns-5559d4f67f-7dsd9"
Dec 05 11:33:37 crc kubenswrapper[4728]: I1205 11:33:37.096112 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/90401a32-04f9-4809-b979-e113808359be-dns-swift-storage-0\") pod \"dnsmasq-dns-5559d4f67f-7dsd9\" (UID: \"90401a32-04f9-4809-b979-e113808359be\") " pod="openstack/dnsmasq-dns-5559d4f67f-7dsd9"
Dec 05 11:33:37 crc kubenswrapper[4728]: I1205 11:33:37.096183 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/90401a32-04f9-4809-b979-e113808359be-ovsdbserver-sb\") pod \"dnsmasq-dns-5559d4f67f-7dsd9\" (UID: \"90401a32-04f9-4809-b979-e113808359be\") " pod="openstack/dnsmasq-dns-5559d4f67f-7dsd9"
Dec 05 11:33:37 crc kubenswrapper[4728]: I1205 11:33:37.096887 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/90401a32-04f9-4809-b979-e113808359be-openstack-edpm-ipam\") pod \"dnsmasq-dns-5559d4f67f-7dsd9\" (UID: \"90401a32-04f9-4809-b979-e113808359be\") " pod="openstack/dnsmasq-dns-5559d4f67f-7dsd9"
Dec 05 11:33:37 crc kubenswrapper[4728]: I1205 11:33:37.096905 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/90401a32-04f9-4809-b979-e113808359be-config\") pod \"dnsmasq-dns-5559d4f67f-7dsd9\" (UID: \"90401a32-04f9-4809-b979-e113808359be\") " pod="openstack/dnsmasq-dns-5559d4f67f-7dsd9"
Dec 05 11:33:37 crc kubenswrapper[4728]: I1205 11:33:37.097398 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/90401a32-04f9-4809-b979-e113808359be-dns-svc\") pod \"dnsmasq-dns-5559d4f67f-7dsd9\" (UID: \"90401a32-04f9-4809-b979-e113808359be\") " pod="openstack/dnsmasq-dns-5559d4f67f-7dsd9"
Dec 05 11:33:37 crc kubenswrapper[4728]: I1205 11:33:37.112173 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5zft9\" (UniqueName: \"kubernetes.io/projected/90401a32-04f9-4809-b979-e113808359be-kube-api-access-5zft9\") pod \"dnsmasq-dns-5559d4f67f-7dsd9\" (UID: \"90401a32-04f9-4809-b979-e113808359be\") " pod="openstack/dnsmasq-dns-5559d4f67f-7dsd9"
Dec 05 11:33:37 crc kubenswrapper[4728]: I1205 11:33:37.163687 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5559d4f67f-7dsd9"
Dec 05 11:33:37 crc kubenswrapper[4728]: I1205 11:33:37.686423 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5559d4f67f-7dsd9"]
Dec 05 11:33:37 crc kubenswrapper[4728]: I1205 11:33:37.909870 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5559d4f67f-7dsd9" event={"ID":"90401a32-04f9-4809-b979-e113808359be","Type":"ContainerStarted","Data":"15f1522ec7b851146a3dc3c782216667f6520cafc9859469c710b4475bb82bae"}
Dec 05 11:33:38 crc kubenswrapper[4728]: I1205 11:33:38.366427 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e89cf8d0-efb6-4c10-8ffd-061fc03079be" path="/var/lib/kubelet/pods/e89cf8d0-efb6-4c10-8ffd-061fc03079be/volumes"
Dec 05 11:33:38 crc kubenswrapper[4728]: I1205 11:33:38.929859 4728 generic.go:334] "Generic (PLEG): container finished" podID="90401a32-04f9-4809-b979-e113808359be" containerID="9c8a13792f18542995ebd4d427b3e321e1f29370399ebc1f4b1073d66676481b" exitCode=0
Dec 05 11:33:38 crc kubenswrapper[4728]: I1205 11:33:38.929978 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5559d4f67f-7dsd9" event={"ID":"90401a32-04f9-4809-b979-e113808359be","Type":"ContainerDied","Data":"9c8a13792f18542995ebd4d427b3e321e1f29370399ebc1f4b1073d66676481b"}
Dec 05 11:33:38 crc kubenswrapper[4728]: I1205 11:33:38.934530 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"297a0136-c048-4d32-ae75-2691e2bb98b4","Type":"ContainerStarted","Data":"476513b7f06ef0f35191f57c87eb9351461d46542b941bb0627f96a78e761f60"}
Dec 05 11:33:39 crc kubenswrapper[4728]: I1205 11:33:39.945283 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5559d4f67f-7dsd9" event={"ID":"90401a32-04f9-4809-b979-e113808359be","Type":"ContainerStarted","Data":"661a5cd78d61a8fa0c111c0b5478c8315da31f60a11f6b2297ff9c05c118e29a"}
Dec 05 11:33:39 crc kubenswrapper[4728]: I1205 11:33:39.984354 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5559d4f67f-7dsd9" podStartSLOduration=3.98432599 podStartE2EDuration="3.98432599s" podCreationTimestamp="2025-12-05 11:33:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:33:39.968063968 +0000 UTC m=+1554.110186701" watchObservedRunningTime="2025-12-05 11:33:39.98432599 +0000 UTC m=+1554.126448723"
Dec 05 11:33:40 crc kubenswrapper[4728]: I1205 11:33:40.961803 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5559d4f67f-7dsd9"
Dec 05 11:33:47 crc kubenswrapper[4728]: I1205 11:33:47.165086 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5559d4f67f-7dsd9"
Dec 05 11:33:47 crc kubenswrapper[4728]: I1205 11:33:47.274607 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b4c997d87-lb4tw"]
Dec 05 11:33:47 crc kubenswrapper[4728]: I1205 11:33:47.276723 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5b4c997d87-lb4tw" podUID="3e8c1db4-4dcb-4500-a280-5ce6e96f855d" containerName="dnsmasq-dns" containerID="cri-o://da2273746e1babb155d448c6edf12ee13fbba3bf49be93725788d1ce3a1a55a3" gracePeriod=10
Dec 05 11:33:47 crc kubenswrapper[4728]: I1205 11:33:47.410187 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5d99fc9df9-2cd8j"]
Dec 05 11:33:47 crc kubenswrapper[4728]: I1205 11:33:47.412479 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d99fc9df9-2cd8j"
Dec 05 11:33:47 crc kubenswrapper[4728]: I1205 11:33:47.428668 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5d99fc9df9-2cd8j"]
Dec 05 11:33:47 crc kubenswrapper[4728]: I1205 11:33:47.521140 4728 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5b4c997d87-lb4tw" podUID="3e8c1db4-4dcb-4500-a280-5ce6e96f855d" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.217:5353: connect: connection refused"
Dec 05 11:33:47 crc kubenswrapper[4728]: I1205 11:33:47.561495 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/d49621d5-c052-4869-ace8-926a18cc570d-openstack-edpm-ipam\") pod \"dnsmasq-dns-5d99fc9df9-2cd8j\" (UID: \"d49621d5-c052-4869-ace8-926a18cc570d\") " pod="openstack/dnsmasq-dns-5d99fc9df9-2cd8j"
Dec 05 11:33:47 crc kubenswrapper[4728]: I1205 11:33:47.561649 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d49621d5-c052-4869-ace8-926a18cc570d-config\") pod \"dnsmasq-dns-5d99fc9df9-2cd8j\" (UID: \"d49621d5-c052-4869-ace8-926a18cc570d\") " pod="openstack/dnsmasq-dns-5d99fc9df9-2cd8j"
Dec 05 11:33:47 crc kubenswrapper[4728]: I1205 11:33:47.561674 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d49621d5-c052-4869-ace8-926a18cc570d-dns-svc\") pod \"dnsmasq-dns-5d99fc9df9-2cd8j\" (UID: \"d49621d5-c052-4869-ace8-926a18cc570d\") " pod="openstack/dnsmasq-dns-5d99fc9df9-2cd8j"
Dec 05 11:33:47 crc kubenswrapper[4728]: I1205 11:33:47.561723 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d49621d5-c052-4869-ace8-926a18cc570d-dns-swift-storage-0\") pod \"dnsmasq-dns-5d99fc9df9-2cd8j\" (UID: \"d49621d5-c052-4869-ace8-926a18cc570d\") " pod="openstack/dnsmasq-dns-5d99fc9df9-2cd8j"
Dec 05 11:33:47 crc kubenswrapper[4728]: I1205 11:33:47.561837 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d49621d5-c052-4869-ace8-926a18cc570d-ovsdbserver-nb\") pod \"dnsmasq-dns-5d99fc9df9-2cd8j\" (UID: \"d49621d5-c052-4869-ace8-926a18cc570d\") " pod="openstack/dnsmasq-dns-5d99fc9df9-2cd8j"
Dec 05 11:33:47 crc kubenswrapper[4728]: I1205 11:33:47.561863 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4zfh9\" (UniqueName: \"kubernetes.io/projected/d49621d5-c052-4869-ace8-926a18cc570d-kube-api-access-4zfh9\") pod \"dnsmasq-dns-5d99fc9df9-2cd8j\" (UID: \"d49621d5-c052-4869-ace8-926a18cc570d\") " pod="openstack/dnsmasq-dns-5d99fc9df9-2cd8j"
Dec 05 11:33:47 crc kubenswrapper[4728]: I1205 11:33:47.562217 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d49621d5-c052-4869-ace8-926a18cc570d-ovsdbserver-sb\") pod \"dnsmasq-dns-5d99fc9df9-2cd8j\" (UID: \"d49621d5-c052-4869-ace8-926a18cc570d\") " pod="openstack/dnsmasq-dns-5d99fc9df9-2cd8j"
Dec 05 11:33:47 crc kubenswrapper[4728]: I1205 11:33:47.666276 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d49621d5-c052-4869-ace8-926a18cc570d-ovsdbserver-sb\") pod \"dnsmasq-dns-5d99fc9df9-2cd8j\" (UID: \"d49621d5-c052-4869-ace8-926a18cc570d\") " pod="openstack/dnsmasq-dns-5d99fc9df9-2cd8j"
Dec 05 11:33:47 crc kubenswrapper[4728]: I1205 11:33:47.666531 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/d49621d5-c052-4869-ace8-926a18cc570d-openstack-edpm-ipam\") pod \"dnsmasq-dns-5d99fc9df9-2cd8j\" (UID: \"d49621d5-c052-4869-ace8-926a18cc570d\") " pod="openstack/dnsmasq-dns-5d99fc9df9-2cd8j"
Dec 05 11:33:47 crc kubenswrapper[4728]: I1205 11:33:47.666614 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d49621d5-c052-4869-ace8-926a18cc570d-config\") pod \"dnsmasq-dns-5d99fc9df9-2cd8j\" (UID: \"d49621d5-c052-4869-ace8-926a18cc570d\") " pod="openstack/dnsmasq-dns-5d99fc9df9-2cd8j"
Dec 05 11:33:47 crc kubenswrapper[4728]: I1205 11:33:47.666650 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d49621d5-c052-4869-ace8-926a18cc570d-dns-svc\") pod \"dnsmasq-dns-5d99fc9df9-2cd8j\" (UID: \"d49621d5-c052-4869-ace8-926a18cc570d\") " pod="openstack/dnsmasq-dns-5d99fc9df9-2cd8j"
Dec 05 11:33:47 crc kubenswrapper[4728]: I1205 11:33:47.666711 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d49621d5-c052-4869-ace8-926a18cc570d-dns-swift-storage-0\") pod \"dnsmasq-dns-5d99fc9df9-2cd8j\" (UID: \"d49621d5-c052-4869-ace8-926a18cc570d\") " pod="openstack/dnsmasq-dns-5d99fc9df9-2cd8j"
Dec 05 11:33:47 crc kubenswrapper[4728]: I1205 11:33:47.666852 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d49621d5-c052-4869-ace8-926a18cc570d-ovsdbserver-nb\") pod \"dnsmasq-dns-5d99fc9df9-2cd8j\" (UID: \"d49621d5-c052-4869-ace8-926a18cc570d\") " pod="openstack/dnsmasq-dns-5d99fc9df9-2cd8j"
Dec 05 11:33:47 crc kubenswrapper[4728]: I1205 11:33:47.666879 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4zfh9\" (UniqueName: \"kubernetes.io/projected/d49621d5-c052-4869-ace8-926a18cc570d-kube-api-access-4zfh9\") pod \"dnsmasq-dns-5d99fc9df9-2cd8j\" (UID: \"d49621d5-c052-4869-ace8-926a18cc570d\") " pod="openstack/dnsmasq-dns-5d99fc9df9-2cd8j"
Dec 05 11:33:47 crc kubenswrapper[4728]: I1205 11:33:47.667341 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d49621d5-c052-4869-ace8-926a18cc570d-config\") pod \"dnsmasq-dns-5d99fc9df9-2cd8j\" (UID: \"d49621d5-c052-4869-ace8-926a18cc570d\") " pod="openstack/dnsmasq-dns-5d99fc9df9-2cd8j"
Dec 05 11:33:47 crc kubenswrapper[4728]: I1205 11:33:47.667763 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/d49621d5-c052-4869-ace8-926a18cc570d-openstack-edpm-ipam\") pod \"dnsmasq-dns-5d99fc9df9-2cd8j\" (UID: \"d49621d5-c052-4869-ace8-926a18cc570d\") " pod="openstack/dnsmasq-dns-5d99fc9df9-2cd8j"
Dec 05 11:33:47 crc kubenswrapper[4728]: I1205 11:33:47.667968 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d49621d5-c052-4869-ace8-926a18cc570d-dns-svc\") pod \"dnsmasq-dns-5d99fc9df9-2cd8j\" (UID: \"d49621d5-c052-4869-ace8-926a18cc570d\") " pod="openstack/dnsmasq-dns-5d99fc9df9-2cd8j"
Dec 05 11:33:47 crc kubenswrapper[4728]: I1205 11:33:47.668060 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d49621d5-c052-4869-ace8-926a18cc570d-ovsdbserver-sb\") pod \"dnsmasq-dns-5d99fc9df9-2cd8j\" (UID: \"d49621d5-c052-4869-ace8-926a18cc570d\") " pod="openstack/dnsmasq-dns-5d99fc9df9-2cd8j"
Dec 05 11:33:47 crc kubenswrapper[4728]: I1205 11:33:47.668696 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d49621d5-c052-4869-ace8-926a18cc570d-dns-swift-storage-0\") pod \"dnsmasq-dns-5d99fc9df9-2cd8j\" (UID: \"d49621d5-c052-4869-ace8-926a18cc570d\") " pod="openstack/dnsmasq-dns-5d99fc9df9-2cd8j"
Dec 05 11:33:47 crc kubenswrapper[4728]: I1205 11:33:47.668779 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d49621d5-c052-4869-ace8-926a18cc570d-ovsdbserver-nb\") pod \"dnsmasq-dns-5d99fc9df9-2cd8j\" (UID: \"d49621d5-c052-4869-ace8-926a18cc570d\") " pod="openstack/dnsmasq-dns-5d99fc9df9-2cd8j"
Dec 05 11:33:47 crc kubenswrapper[4728]: I1205 11:33:47.687345 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4zfh9\" (UniqueName: \"kubernetes.io/projected/d49621d5-c052-4869-ace8-926a18cc570d-kube-api-access-4zfh9\") pod \"dnsmasq-dns-5d99fc9df9-2cd8j\" (UID: \"d49621d5-c052-4869-ace8-926a18cc570d\") " pod="openstack/dnsmasq-dns-5d99fc9df9-2cd8j"
Dec 05 11:33:47 crc kubenswrapper[4728]: I1205 11:33:47.831262 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d99fc9df9-2cd8j"
Dec 05 11:33:47 crc kubenswrapper[4728]: I1205 11:33:47.912841 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b4c997d87-lb4tw"
Dec 05 11:33:48 crc kubenswrapper[4728]: I1205 11:33:48.037735 4728 generic.go:334] "Generic (PLEG): container finished" podID="3e8c1db4-4dcb-4500-a280-5ce6e96f855d" containerID="da2273746e1babb155d448c6edf12ee13fbba3bf49be93725788d1ce3a1a55a3" exitCode=0
Dec 05 11:33:48 crc kubenswrapper[4728]: I1205 11:33:48.037777 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b4c997d87-lb4tw" event={"ID":"3e8c1db4-4dcb-4500-a280-5ce6e96f855d","Type":"ContainerDied","Data":"da2273746e1babb155d448c6edf12ee13fbba3bf49be93725788d1ce3a1a55a3"}
Dec 05 11:33:48 crc kubenswrapper[4728]: I1205 11:33:48.037817 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b4c997d87-lb4tw" event={"ID":"3e8c1db4-4dcb-4500-a280-5ce6e96f855d","Type":"ContainerDied","Data":"7cd464d620e0abb780fae3df1d00d2d13f50964443b21528d641b54e27b97220"}
Dec 05 11:33:48 crc kubenswrapper[4728]: I1205 11:33:48.037817 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b4c997d87-lb4tw"
Dec 05 11:33:48 crc kubenswrapper[4728]: I1205 11:33:48.037835 4728 scope.go:117] "RemoveContainer" containerID="da2273746e1babb155d448c6edf12ee13fbba3bf49be93725788d1ce3a1a55a3"
Dec 05 11:33:48 crc kubenswrapper[4728]: I1205 11:33:48.066229 4728 scope.go:117] "RemoveContainer" containerID="ea5282681cef0c4d03fff4288f1c50f40a7ca295c169baa78eb35ad3b802ee63"
Dec 05 11:33:48 crc kubenswrapper[4728]: I1205 11:33:48.085951 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3e8c1db4-4dcb-4500-a280-5ce6e96f855d-ovsdbserver-sb\") pod \"3e8c1db4-4dcb-4500-a280-5ce6e96f855d\" (UID: \"3e8c1db4-4dcb-4500-a280-5ce6e96f855d\") "
Dec 05 11:33:48 crc kubenswrapper[4728]: I1205 11:33:48.086021 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cmgvd\" (UniqueName: \"kubernetes.io/projected/3e8c1db4-4dcb-4500-a280-5ce6e96f855d-kube-api-access-cmgvd\") pod \"3e8c1db4-4dcb-4500-a280-5ce6e96f855d\" (UID: \"3e8c1db4-4dcb-4500-a280-5ce6e96f855d\") "
Dec 05 11:33:48 crc kubenswrapper[4728]: I1205 11:33:48.086078 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3e8c1db4-4dcb-4500-a280-5ce6e96f855d-dns-swift-storage-0\") pod \"3e8c1db4-4dcb-4500-a280-5ce6e96f855d\" (UID: \"3e8c1db4-4dcb-4500-a280-5ce6e96f855d\") "
Dec 05 11:33:48 crc kubenswrapper[4728]: I1205 11:33:48.086782 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e8c1db4-4dcb-4500-a280-5ce6e96f855d-config\") pod \"3e8c1db4-4dcb-4500-a280-5ce6e96f855d\" (UID: \"3e8c1db4-4dcb-4500-a280-5ce6e96f855d\") "
Dec 05 11:33:48 crc kubenswrapper[4728]: I1205 11:33:48.086823 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3e8c1db4-4dcb-4500-a280-5ce6e96f855d-dns-svc\") pod \"3e8c1db4-4dcb-4500-a280-5ce6e96f855d\" (UID: \"3e8c1db4-4dcb-4500-a280-5ce6e96f855d\") "
Dec 05 11:33:48 crc kubenswrapper[4728]: I1205 11:33:48.086854 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3e8c1db4-4dcb-4500-a280-5ce6e96f855d-ovsdbserver-nb\") pod \"3e8c1db4-4dcb-4500-a280-5ce6e96f855d\" (UID: \"3e8c1db4-4dcb-4500-a280-5ce6e96f855d\") "
Dec 05 11:33:48 crc kubenswrapper[4728]: I1205 11:33:48.092309 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e8c1db4-4dcb-4500-a280-5ce6e96f855d-kube-api-access-cmgvd" (OuterVolumeSpecName: "kube-api-access-cmgvd") pod "3e8c1db4-4dcb-4500-a280-5ce6e96f855d" (UID: "3e8c1db4-4dcb-4500-a280-5ce6e96f855d"). InnerVolumeSpecName "kube-api-access-cmgvd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:33:48 crc kubenswrapper[4728]: I1205 11:33:48.098493 4728 scope.go:117] "RemoveContainer" containerID="da2273746e1babb155d448c6edf12ee13fbba3bf49be93725788d1ce3a1a55a3"
Dec 05 11:33:48 crc kubenswrapper[4728]: E1205 11:33:48.099056 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"da2273746e1babb155d448c6edf12ee13fbba3bf49be93725788d1ce3a1a55a3\": container with ID starting with da2273746e1babb155d448c6edf12ee13fbba3bf49be93725788d1ce3a1a55a3 not found: ID does not exist" containerID="da2273746e1babb155d448c6edf12ee13fbba3bf49be93725788d1ce3a1a55a3"
Dec 05 11:33:48 crc kubenswrapper[4728]: I1205 11:33:48.099120 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da2273746e1babb155d448c6edf12ee13fbba3bf49be93725788d1ce3a1a55a3"} err="failed to get container status \"da2273746e1babb155d448c6edf12ee13fbba3bf49be93725788d1ce3a1a55a3\": rpc error: code = NotFound desc = could not find container \"da2273746e1babb155d448c6edf12ee13fbba3bf49be93725788d1ce3a1a55a3\": container with ID starting with da2273746e1babb155d448c6edf12ee13fbba3bf49be93725788d1ce3a1a55a3 not found: ID does not exist"
Dec 05 11:33:48 crc kubenswrapper[4728]: I1205 11:33:48.099149 4728 scope.go:117] "RemoveContainer" containerID="ea5282681cef0c4d03fff4288f1c50f40a7ca295c169baa78eb35ad3b802ee63"
Dec 05 11:33:48 crc kubenswrapper[4728]: E1205 11:33:48.099515 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ea5282681cef0c4d03fff4288f1c50f40a7ca295c169baa78eb35ad3b802ee63\": container with ID starting with ea5282681cef0c4d03fff4288f1c50f40a7ca295c169baa78eb35ad3b802ee63 not found: ID does not exist" containerID="ea5282681cef0c4d03fff4288f1c50f40a7ca295c169baa78eb35ad3b802ee63"
Dec 05 11:33:48 crc kubenswrapper[4728]: I1205 11:33:48.099553 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea5282681cef0c4d03fff4288f1c50f40a7ca295c169baa78eb35ad3b802ee63"} err="failed to get container status \"ea5282681cef0c4d03fff4288f1c50f40a7ca295c169baa78eb35ad3b802ee63\": rpc error: code = NotFound desc = could not find container \"ea5282681cef0c4d03fff4288f1c50f40a7ca295c169baa78eb35ad3b802ee63\": container with ID starting with ea5282681cef0c4d03fff4288f1c50f40a7ca295c169baa78eb35ad3b802ee63 not found: ID does not exist"
Dec 05 11:33:48 crc kubenswrapper[4728]: I1205 11:33:48.140264 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e8c1db4-4dcb-4500-a280-5ce6e96f855d-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "3e8c1db4-4dcb-4500-a280-5ce6e96f855d" (UID: "3e8c1db4-4dcb-4500-a280-5ce6e96f855d"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:33:48 crc kubenswrapper[4728]: I1205 11:33:48.143338 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e8c1db4-4dcb-4500-a280-5ce6e96f855d-config" (OuterVolumeSpecName: "config") pod "3e8c1db4-4dcb-4500-a280-5ce6e96f855d" (UID: "3e8c1db4-4dcb-4500-a280-5ce6e96f855d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:33:48 crc kubenswrapper[4728]: I1205 11:33:48.147591 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e8c1db4-4dcb-4500-a280-5ce6e96f855d-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "3e8c1db4-4dcb-4500-a280-5ce6e96f855d" (UID: "3e8c1db4-4dcb-4500-a280-5ce6e96f855d"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:33:48 crc kubenswrapper[4728]: I1205 11:33:48.149363 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e8c1db4-4dcb-4500-a280-5ce6e96f855d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "3e8c1db4-4dcb-4500-a280-5ce6e96f855d" (UID: "3e8c1db4-4dcb-4500-a280-5ce6e96f855d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:33:48 crc kubenswrapper[4728]: I1205 11:33:48.156414 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e8c1db4-4dcb-4500-a280-5ce6e96f855d-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "3e8c1db4-4dcb-4500-a280-5ce6e96f855d" (UID: "3e8c1db4-4dcb-4500-a280-5ce6e96f855d"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:33:48 crc kubenswrapper[4728]: I1205 11:33:48.190179 4728 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3e8c1db4-4dcb-4500-a280-5ce6e96f855d-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Dec 05 11:33:48 crc kubenswrapper[4728]: I1205 11:33:48.190216 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cmgvd\" (UniqueName: \"kubernetes.io/projected/3e8c1db4-4dcb-4500-a280-5ce6e96f855d-kube-api-access-cmgvd\") on node \"crc\" DevicePath \"\""
Dec 05 11:33:48 crc kubenswrapper[4728]: I1205 11:33:48.190226 4728 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3e8c1db4-4dcb-4500-a280-5ce6e96f855d-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Dec 05 11:33:48 crc kubenswrapper[4728]: I1205 11:33:48.190239 4728 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e8c1db4-4dcb-4500-a280-5ce6e96f855d-config\") on node \"crc\" DevicePath \"\""
Dec 05 11:33:48 crc kubenswrapper[4728]: I1205 11:33:48.190247 4728 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3e8c1db4-4dcb-4500-a280-5ce6e96f855d-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 05 11:33:48 crc kubenswrapper[4728]: I1205 11:33:48.190255 4728 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3e8c1db4-4dcb-4500-a280-5ce6e96f855d-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Dec 05 11:33:48 crc kubenswrapper[4728]: I1205 11:33:48.383308 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5d99fc9df9-2cd8j"]
Dec 05 11:33:48 crc kubenswrapper[4728]: I1205 11:33:48.411420 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b4c997d87-lb4tw"]
Dec 05 11:33:48 crc kubenswrapper[4728]: I1205 11:33:48.418710 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5b4c997d87-lb4tw"]
Dec 05 11:33:49 crc kubenswrapper[4728]: I1205 11:33:49.052708 4728 generic.go:334] "Generic (PLEG): container finished" podID="d49621d5-c052-4869-ace8-926a18cc570d" containerID="2a3308aa16ad7d5f9923524da4e10f7109c729e214755fa69c10db943df02627" exitCode=0
Dec 05 11:33:49 crc kubenswrapper[4728]: I1205 11:33:49.052780 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d99fc9df9-2cd8j" event={"ID":"d49621d5-c052-4869-ace8-926a18cc570d","Type":"ContainerDied","Data":"2a3308aa16ad7d5f9923524da4e10f7109c729e214755fa69c10db943df02627"}
Dec 05 11:33:49 crc kubenswrapper[4728]: I1205 11:33:49.052856 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d99fc9df9-2cd8j" event={"ID":"d49621d5-c052-4869-ace8-926a18cc570d","Type":"ContainerStarted","Data":"172d990b112e5b1422b60539d0f83a4764d40df07ba81d13c2c0b4db69aa6b14"}
Dec 05 11:33:50 crc kubenswrapper[4728]: I1205 11:33:50.069984 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d99fc9df9-2cd8j" event={"ID":"d49621d5-c052-4869-ace8-926a18cc570d","Type":"ContainerStarted","Data":"4f1c10c9c549ca2647f02b6e495d10be9c209b2510ed5b2a599fb1b51ce138b9"}
Dec 05 11:33:50 crc kubenswrapper[4728]: I1205 11:33:50.071599 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5d99fc9df9-2cd8j"
Dec 05 11:33:50 crc kubenswrapper[4728]: I1205 11:33:50.110373 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5d99fc9df9-2cd8j" podStartSLOduration=3.110351413 podStartE2EDuration="3.110351413s" podCreationTimestamp="2025-12-05 11:33:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:33:50.095828875 +0000 UTC m=+1564.237951578" watchObservedRunningTime="2025-12-05 11:33:50.110351413 +0000 UTC m=+1564.252474126"
Dec 05 11:33:50 crc kubenswrapper[4728]: I1205 11:33:50.368514 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3e8c1db4-4dcb-4500-a280-5ce6e96f855d" path="/var/lib/kubelet/pods/3e8c1db4-4dcb-4500-a280-5ce6e96f855d/volumes"
Dec 05 11:33:57 crc kubenswrapper[4728]: I1205 11:33:57.833042 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5d99fc9df9-2cd8j"
Dec 05 11:33:57 crc kubenswrapper[4728]: I1205 11:33:57.950140 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5559d4f67f-7dsd9"]
Dec 05 11:33:57 crc kubenswrapper[4728]: I1205 11:33:57.950361 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5559d4f67f-7dsd9" podUID="90401a32-04f9-4809-b979-e113808359be" containerName="dnsmasq-dns" containerID="cri-o://661a5cd78d61a8fa0c111c0b5478c8315da31f60a11f6b2297ff9c05c118e29a" gracePeriod=10
Dec 05 11:33:58 crc kubenswrapper[4728]: I1205 11:33:58.172709 4728 generic.go:334] "Generic (PLEG): container finished" podID="90401a32-04f9-4809-b979-e113808359be" containerID="661a5cd78d61a8fa0c111c0b5478c8315da31f60a11f6b2297ff9c05c118e29a" exitCode=0
Dec 05 11:33:58 crc kubenswrapper[4728]: I1205 11:33:58.173020 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5559d4f67f-7dsd9" event={"ID":"90401a32-04f9-4809-b979-e113808359be","Type":"ContainerDied","Data":"661a5cd78d61a8fa0c111c0b5478c8315da31f60a11f6b2297ff9c05c118e29a"}
Dec 05 11:33:58 crc kubenswrapper[4728]: I1205 11:33:58.432538 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5559d4f67f-7dsd9"
Dec 05 11:33:58 crc kubenswrapper[4728]: I1205 11:33:58.620374 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/90401a32-04f9-4809-b979-e113808359be-dns-svc\") pod \"90401a32-04f9-4809-b979-e113808359be\" (UID: \"90401a32-04f9-4809-b979-e113808359be\") "
Dec 05 11:33:58 crc kubenswrapper[4728]: I1205 11:33:58.620506 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/90401a32-04f9-4809-b979-e113808359be-ovsdbserver-nb\") pod \"90401a32-04f9-4809-b979-e113808359be\" (UID: \"90401a32-04f9-4809-b979-e113808359be\") "
Dec 05 11:33:58 crc kubenswrapper[4728]: I1205 11:33:58.620559 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5zft9\" (UniqueName: \"kubernetes.io/projected/90401a32-04f9-4809-b979-e113808359be-kube-api-access-5zft9\") pod \"90401a32-04f9-4809-b979-e113808359be\" (UID: \"90401a32-04f9-4809-b979-e113808359be\") "
Dec 05 11:33:58 crc kubenswrapper[4728]: I1205 11:33:58.620613 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/90401a32-04f9-4809-b979-e113808359be-config\") pod \"90401a32-04f9-4809-b979-e113808359be\" (UID: \"90401a32-04f9-4809-b979-e113808359be\") "
Dec 05 11:33:58 crc kubenswrapper[4728]: I1205 11:33:58.620880 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/90401a32-04f9-4809-b979-e113808359be-dns-swift-storage-0\") pod \"90401a32-04f9-4809-b979-e113808359be\" (UID: \"90401a32-04f9-4809-b979-e113808359be\") "
Dec 05 11:33:58 crc kubenswrapper[4728]: I1205 11:33:58.621877 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/90401a32-04f9-4809-b979-e113808359be-openstack-edpm-ipam\") pod \"90401a32-04f9-4809-b979-e113808359be\" (UID: \"90401a32-04f9-4809-b979-e113808359be\") "
Dec 05 11:33:58 crc kubenswrapper[4728]: I1205 11:33:58.621933 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/90401a32-04f9-4809-b979-e113808359be-ovsdbserver-sb\") pod \"90401a32-04f9-4809-b979-e113808359be\" (UID: \"90401a32-04f9-4809-b979-e113808359be\") "
Dec 05 11:33:58 crc kubenswrapper[4728]: I1205 11:33:58.627056 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/90401a32-04f9-4809-b979-e113808359be-kube-api-access-5zft9" (OuterVolumeSpecName: "kube-api-access-5zft9") pod "90401a32-04f9-4809-b979-e113808359be" (UID: "90401a32-04f9-4809-b979-e113808359be"). InnerVolumeSpecName "kube-api-access-5zft9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:33:58 crc kubenswrapper[4728]: I1205 11:33:58.686390 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/90401a32-04f9-4809-b979-e113808359be-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "90401a32-04f9-4809-b979-e113808359be" (UID: "90401a32-04f9-4809-b979-e113808359be"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:33:58 crc kubenswrapper[4728]: I1205 11:33:58.689075 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/90401a32-04f9-4809-b979-e113808359be-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "90401a32-04f9-4809-b979-e113808359be" (UID: "90401a32-04f9-4809-b979-e113808359be"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:33:58 crc kubenswrapper[4728]: I1205 11:33:58.698021 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/90401a32-04f9-4809-b979-e113808359be-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "90401a32-04f9-4809-b979-e113808359be" (UID: "90401a32-04f9-4809-b979-e113808359be"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:33:58 crc kubenswrapper[4728]: I1205 11:33:58.705469 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/90401a32-04f9-4809-b979-e113808359be-config" (OuterVolumeSpecName: "config") pod "90401a32-04f9-4809-b979-e113808359be" (UID: "90401a32-04f9-4809-b979-e113808359be"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:33:58 crc kubenswrapper[4728]: I1205 11:33:58.717202 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/90401a32-04f9-4809-b979-e113808359be-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "90401a32-04f9-4809-b979-e113808359be" (UID: "90401a32-04f9-4809-b979-e113808359be"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:33:58 crc kubenswrapper[4728]: I1205 11:33:58.724259 4728 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/90401a32-04f9-4809-b979-e113808359be-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Dec 05 11:33:58 crc kubenswrapper[4728]: I1205 11:33:58.724291 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5zft9\" (UniqueName: \"kubernetes.io/projected/90401a32-04f9-4809-b979-e113808359be-kube-api-access-5zft9\") on node \"crc\" DevicePath \"\""
Dec 05 11:33:58 crc kubenswrapper[4728]: I1205 11:33:58.724309 4728 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/90401a32-04f9-4809-b979-e113808359be-config\") on node \"crc\" DevicePath \"\""
Dec 05 11:33:58 crc kubenswrapper[4728]: I1205 11:33:58.724324 4728 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/90401a32-04f9-4809-b979-e113808359be-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\""
Dec 05 11:33:58 crc kubenswrapper[4728]: I1205 11:33:58.724338 4728 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/90401a32-04f9-4809-b979-e113808359be-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Dec 05 11:33:58 crc kubenswrapper[4728]: I1205 11:33:58.724350 4728 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/90401a32-04f9-4809-b979-e113808359be-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 05 11:33:58 crc kubenswrapper[4728]: I1205 11:33:58.724896 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/90401a32-04f9-4809-b979-e113808359be-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "90401a32-04f9-4809-b979-e113808359be" (UID: "90401a32-04f9-4809-b979-e113808359be"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:33:58 crc kubenswrapper[4728]: I1205 11:33:58.826471 4728 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/90401a32-04f9-4809-b979-e113808359be-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Dec 05 11:33:59 crc kubenswrapper[4728]: I1205 11:33:59.190948 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5559d4f67f-7dsd9" event={"ID":"90401a32-04f9-4809-b979-e113808359be","Type":"ContainerDied","Data":"15f1522ec7b851146a3dc3c782216667f6520cafc9859469c710b4475bb82bae"}
Dec 05 11:33:59 crc kubenswrapper[4728]: I1205 11:33:59.191043 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5559d4f67f-7dsd9"
Dec 05 11:33:59 crc kubenswrapper[4728]: I1205 11:33:59.191233 4728 scope.go:117] "RemoveContainer" containerID="661a5cd78d61a8fa0c111c0b5478c8315da31f60a11f6b2297ff9c05c118e29a"
Dec 05 11:33:59 crc kubenswrapper[4728]: I1205 11:33:59.224886 4728 scope.go:117] "RemoveContainer" containerID="9c8a13792f18542995ebd4d427b3e321e1f29370399ebc1f4b1073d66676481b"
Dec 05 11:33:59 crc kubenswrapper[4728]: I1205 11:33:59.253774 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5559d4f67f-7dsd9"]
Dec 05 11:33:59 crc kubenswrapper[4728]: I1205 11:33:59.265967 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5559d4f67f-7dsd9"]
Dec 05 11:34:00 crc kubenswrapper[4728]: I1205 11:34:00.369735 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="90401a32-04f9-4809-b979-e113808359be" path="/var/lib/kubelet/pods/90401a32-04f9-4809-b979-e113808359be/volumes"
Dec 05 11:34:09 crc kubenswrapper[4728]: I1205 11:34:09.336752 4728 generic.go:334] "Generic (PLEG): container finished" podID="9f981b7a-7de1-4ce3-ae26-5693c659923d" containerID="22a275ee086a57d83d14e02d3b9f0b46297e03690cc7d1ed65246a7eded263e1" exitCode=0
Dec 05 11:34:09 crc kubenswrapper[4728]: I1205 11:34:09.336863 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"9f981b7a-7de1-4ce3-ae26-5693c659923d","Type":"ContainerDied","Data":"22a275ee086a57d83d14e02d3b9f0b46297e03690cc7d1ed65246a7eded263e1"}
Dec 05 11:34:10 crc kubenswrapper[4728]: I1205 11:34:10.350259 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"9f981b7a-7de1-4ce3-ae26-5693c659923d","Type":"ContainerStarted","Data":"a38a11480dc321bb16ed1038e60ca6e4d0709bf810bd7262f98bc45f3c653b97"}
Dec 05 11:34:10 crc kubenswrapper[4728]: I1205 11:34:10.365564 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0"
Dec 05 11:34:10 crc kubenswrapper[4728]: I1205 11:34:10.382494 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=36.382470129 podStartE2EDuration="36.382470129s" podCreationTimestamp="2025-12-05 11:33:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:34:10.372525057 +0000 UTC m=+1584.514647770" watchObservedRunningTime="2025-12-05 11:34:10.382470129 +0000 UTC m=+1584.524592832"
Dec 05 11:34:11 crc kubenswrapper[4728]: I1205 11:34:11.073139 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8bbzt"]
Dec 05 11:34:11 crc kubenswrapper[4728]: E1205 11:34:11.073612 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90401a32-04f9-4809-b979-e113808359be" containerName="init"
Dec 05 11:34:11 crc kubenswrapper[4728]: I1205 11:34:11.073634 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="90401a32-04f9-4809-b979-e113808359be" containerName="init"
Dec 05 11:34:11 crc kubenswrapper[4728]: E1205 11:34:11.073651 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90401a32-04f9-4809-b979-e113808359be" containerName="dnsmasq-dns"
Dec 05 11:34:11 crc kubenswrapper[4728]: I1205 11:34:11.073662 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="90401a32-04f9-4809-b979-e113808359be" containerName="dnsmasq-dns"
Dec 05 11:34:11 crc kubenswrapper[4728]: E1205 11:34:11.073684 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e8c1db4-4dcb-4500-a280-5ce6e96f855d" containerName="dnsmasq-dns"
Dec 05 11:34:11 crc kubenswrapper[4728]: I1205 11:34:11.073693 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e8c1db4-4dcb-4500-a280-5ce6e96f855d" containerName="dnsmasq-dns"
Dec 05 11:34:11 crc kubenswrapper[4728]: E1205 11:34:11.073720 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e8c1db4-4dcb-4500-a280-5ce6e96f855d" containerName="init"
Dec 05 11:34:11 crc kubenswrapper[4728]: I1205 11:34:11.073728 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e8c1db4-4dcb-4500-a280-5ce6e96f855d" containerName="init"
Dec 05 11:34:11 crc kubenswrapper[4728]: I1205 11:34:11.073951 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e8c1db4-4dcb-4500-a280-5ce6e96f855d" containerName="dnsmasq-dns"
Dec 05 11:34:11 crc kubenswrapper[4728]: I1205 11:34:11.073981 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="90401a32-04f9-4809-b979-e113808359be" containerName="dnsmasq-dns"
Dec 05 11:34:11 crc kubenswrapper[4728]: I1205 11:34:11.074838 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8bbzt"
Dec 05 11:34:11 crc kubenswrapper[4728]: I1205 11:34:11.077436 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 05 11:34:11 crc kubenswrapper[4728]: I1205 11:34:11.077629 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 05 11:34:11 crc kubenswrapper[4728]: I1205 11:34:11.077744 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b4kzh"
Dec 05 11:34:11 crc kubenswrapper[4728]: I1205 11:34:11.077886 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 05 11:34:11 crc kubenswrapper[4728]: I1205 11:34:11.092559 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8bbzt"]
Dec 05 11:34:11 crc kubenswrapper[4728]: I1205 11:34:11.223033 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12c3ce02-598e-48b2-b81c-7f80d3589de4-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-8bbzt\" (UID: \"12c3ce02-598e-48b2-b81c-7f80d3589de4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8bbzt"
Dec 05 11:34:11 crc kubenswrapper[4728]: I1205 11:34:11.223726 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/12c3ce02-598e-48b2-b81c-7f80d3589de4-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-8bbzt\" (UID: \"12c3ce02-598e-48b2-b81c-7f80d3589de4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8bbzt"
Dec 05 11:34:11 crc kubenswrapper[4728]: I1205 11:34:11.224351 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8lqch\" (UniqueName: \"kubernetes.io/projected/12c3ce02-598e-48b2-b81c-7f80d3589de4-kube-api-access-8lqch\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-8bbzt\" (UID: \"12c3ce02-598e-48b2-b81c-7f80d3589de4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8bbzt"
Dec 05 11:34:11 crc kubenswrapper[4728]: I1205 11:34:11.224631 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/12c3ce02-598e-48b2-b81c-7f80d3589de4-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-8bbzt\" (UID: \"12c3ce02-598e-48b2-b81c-7f80d3589de4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8bbzt"
Dec 05 11:34:11 crc kubenswrapper[4728]: I1205 11:34:11.327869 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8lqch\" (UniqueName: \"kubernetes.io/projected/12c3ce02-598e-48b2-b81c-7f80d3589de4-kube-api-access-8lqch\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-8bbzt\" (UID: \"12c3ce02-598e-48b2-b81c-7f80d3589de4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8bbzt"
Dec 05 11:34:11 crc kubenswrapper[4728]: I1205 11:34:11.328011 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/12c3ce02-598e-48b2-b81c-7f80d3589de4-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-8bbzt\" (UID: \"12c3ce02-598e-48b2-b81c-7f80d3589de4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8bbzt"
Dec 05 11:34:11 crc kubenswrapper[4728]: I1205 11:34:11.328127 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12c3ce02-598e-48b2-b81c-7f80d3589de4-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-8bbzt\" (UID: \"12c3ce02-598e-48b2-b81c-7f80d3589de4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8bbzt"
Dec 05 11:34:11 crc kubenswrapper[4728]: I1205 11:34:11.328186 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/12c3ce02-598e-48b2-b81c-7f80d3589de4-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-8bbzt\" (UID: \"12c3ce02-598e-48b2-b81c-7f80d3589de4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8bbzt"
Dec 05 11:34:11 crc kubenswrapper[4728]: I1205 11:34:11.334686 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/12c3ce02-598e-48b2-b81c-7f80d3589de4-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-8bbzt\" (UID: \"12c3ce02-598e-48b2-b81c-7f80d3589de4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8bbzt"
Dec 05 11:34:11 crc kubenswrapper[4728]: I1205 11:34:11.337310 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/12c3ce02-598e-48b2-b81c-7f80d3589de4-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-8bbzt\" (UID: \"12c3ce02-598e-48b2-b81c-7f80d3589de4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8bbzt"
Dec 05 11:34:11 crc kubenswrapper[4728]: I1205 11:34:11.341259 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12c3ce02-598e-48b2-b81c-7f80d3589de4-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-8bbzt\" (UID: \"12c3ce02-598e-48b2-b81c-7f80d3589de4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8bbzt"
Dec 05 11:34:11 crc kubenswrapper[4728]: I1205 11:34:11.343422 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8lqch\" (UniqueName: \"kubernetes.io/projected/12c3ce02-598e-48b2-b81c-7f80d3589de4-kube-api-access-8lqch\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-8bbzt\" (UID: \"12c3ce02-598e-48b2-b81c-7f80d3589de4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8bbzt"
Dec 05 11:34:11 crc kubenswrapper[4728]: I1205 11:34:11.362675 4728 generic.go:334] "Generic (PLEG): container finished" podID="297a0136-c048-4d32-ae75-2691e2bb98b4" containerID="476513b7f06ef0f35191f57c87eb9351461d46542b941bb0627f96a78e761f60" exitCode=0
Dec 05 11:34:11 crc kubenswrapper[4728]: I1205 11:34:11.362769 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"297a0136-c048-4d32-ae75-2691e2bb98b4","Type":"ContainerDied","Data":"476513b7f06ef0f35191f57c87eb9351461d46542b941bb0627f96a78e761f60"}
Dec 05 11:34:11 crc kubenswrapper[4728]: I1205 11:34:11.394649 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8bbzt"
Dec 05 11:34:12 crc kubenswrapper[4728]: I1205 11:34:12.112164 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8bbzt"]
Dec 05 11:34:12 crc kubenswrapper[4728]: I1205 11:34:12.376100 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8bbzt" event={"ID":"12c3ce02-598e-48b2-b81c-7f80d3589de4","Type":"ContainerStarted","Data":"06de0645fc44a77dcf0cbf1011aefd85268b631865fe29272c21a553ea8af761"}
Dec 05 11:34:12 crc kubenswrapper[4728]: I1205 11:34:12.378613 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"297a0136-c048-4d32-ae75-2691e2bb98b4","Type":"ContainerStarted","Data":"5167099761f81a481c31dbf7a6f0e89bbb4efaea00901f7025fb373eef799210"}
Dec 05 11:34:12 crc kubenswrapper[4728]: I1205 11:34:12.378869 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0"
Dec 05 11:34:12 crc kubenswrapper[4728]: I1205 11:34:12.414106 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=37.414089094 podStartE2EDuration="37.414089094s" podCreationTimestamp="2025-12-05 11:33:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:34:12.409633351 +0000 UTC m=+1586.551756054" watchObservedRunningTime="2025-12-05 11:34:12.414089094 +0000 UTC m=+1586.556211777"
Dec 05 11:34:23 crc kubenswrapper[4728]: I1205 11:34:23.509294 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8bbzt" event={"ID":"12c3ce02-598e-48b2-b81c-7f80d3589de4","Type":"ContainerStarted","Data":"a45dd77747b20323ea75d5808752893113b1a5638d035d12f50092207f622e70"}
Dec 05 11:34:23 crc kubenswrapper[4728]: I1205 11:34:23.546765 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8bbzt" podStartSLOduration=2.151049524 podStartE2EDuration="12.546738173s" podCreationTimestamp="2025-12-05 11:34:11 +0000 UTC" firstStartedPulling="2025-12-05 11:34:12.102697661 +0000 UTC m=+1586.244820354" lastFinishedPulling="2025-12-05 11:34:22.49838632 +0000 UTC m=+1596.640509003" observedRunningTime="2025-12-05 11:34:23.526952891 +0000 UTC m=+1597.669075594" watchObservedRunningTime="2025-12-05 11:34:23.546738173 +0000 UTC m=+1597.688860906"
Dec 05 11:34:24 crc kubenswrapper[4728]: I1205 11:34:24.528142 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0"
Dec 05 11:34:24 crc kubenswrapper[4728]: I1205 11:34:24.601086 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-2t2n4"]
Dec 05 11:34:24 crc kubenswrapper[4728]: I1205 11:34:24.604517 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2t2n4"
Dec 05 11:34:24 crc kubenswrapper[4728]: I1205 11:34:24.620590 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2t2n4"]
Dec 05 11:34:24 crc kubenswrapper[4728]: I1205 11:34:24.728240 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jk2fk\" (UniqueName: \"kubernetes.io/projected/1885fbe0-6acc-484b-948e-0511fbee36e2-kube-api-access-jk2fk\") pod \"certified-operators-2t2n4\" (UID: \"1885fbe0-6acc-484b-948e-0511fbee36e2\") " pod="openshift-marketplace/certified-operators-2t2n4"
Dec 05 11:34:24 crc kubenswrapper[4728]: I1205 11:34:24.728347 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1885fbe0-6acc-484b-948e-0511fbee36e2-catalog-content\") pod \"certified-operators-2t2n4\" (UID: \"1885fbe0-6acc-484b-948e-0511fbee36e2\") " pod="openshift-marketplace/certified-operators-2t2n4"
Dec 05 11:34:24 crc kubenswrapper[4728]: I1205 11:34:24.728376 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1885fbe0-6acc-484b-948e-0511fbee36e2-utilities\") pod \"certified-operators-2t2n4\" (UID: \"1885fbe0-6acc-484b-948e-0511fbee36e2\") " pod="openshift-marketplace/certified-operators-2t2n4"
Dec 05 11:34:24 crc kubenswrapper[4728]: I1205 11:34:24.830304 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1885fbe0-6acc-484b-948e-0511fbee36e2-catalog-content\") pod \"certified-operators-2t2n4\" (UID: \"1885fbe0-6acc-484b-948e-0511fbee36e2\") " pod="openshift-marketplace/certified-operators-2t2n4"
Dec 05 11:34:24 crc kubenswrapper[4728]: I1205 11:34:24.830347 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1885fbe0-6acc-484b-948e-0511fbee36e2-utilities\") pod \"certified-operators-2t2n4\" (UID: \"1885fbe0-6acc-484b-948e-0511fbee36e2\") " pod="openshift-marketplace/certified-operators-2t2n4"
Dec 05 11:34:24 crc kubenswrapper[4728]: I1205 11:34:24.830480 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jk2fk\" (UniqueName: \"kubernetes.io/projected/1885fbe0-6acc-484b-948e-0511fbee36e2-kube-api-access-jk2fk\") pod \"certified-operators-2t2n4\" (UID: \"1885fbe0-6acc-484b-948e-0511fbee36e2\") " pod="openshift-marketplace/certified-operators-2t2n4"
Dec 05 11:34:24 crc kubenswrapper[4728]: I1205 11:34:24.830938 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1885fbe0-6acc-484b-948e-0511fbee36e2-catalog-content\") pod \"certified-operators-2t2n4\" (UID: \"1885fbe0-6acc-484b-948e-0511fbee36e2\") " pod="openshift-marketplace/certified-operators-2t2n4"
Dec 05 11:34:24 crc kubenswrapper[4728]: I1205 11:34:24.831002 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1885fbe0-6acc-484b-948e-0511fbee36e2-utilities\") pod \"certified-operators-2t2n4\" (UID: \"1885fbe0-6acc-484b-948e-0511fbee36e2\") " pod="openshift-marketplace/certified-operators-2t2n4"
Dec 05 11:34:24 crc kubenswrapper[4728]: I1205 11:34:24.850957 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jk2fk\" (UniqueName: \"kubernetes.io/projected/1885fbe0-6acc-484b-948e-0511fbee36e2-kube-api-access-jk2fk\") pod \"certified-operators-2t2n4\" (UID: \"1885fbe0-6acc-484b-948e-0511fbee36e2\") " pod="openshift-marketplace/certified-operators-2t2n4"
Dec 05 11:34:24 crc kubenswrapper[4728]: I1205 11:34:24.946870 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2t2n4"
Dec 05 11:34:25 crc kubenswrapper[4728]: W1205 11:34:25.568497 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1885fbe0_6acc_484b_948e_0511fbee36e2.slice/crio-cfccd6905b90981d73e55487f366d891330bdd4c90b450bf654a064515d62385 WatchSource:0}: Error finding container cfccd6905b90981d73e55487f366d891330bdd4c90b450bf654a064515d62385: Status 404 returned error can't find the container with id cfccd6905b90981d73e55487f366d891330bdd4c90b450bf654a064515d62385
Dec 05 11:34:25 crc kubenswrapper[4728]: I1205 11:34:25.571733 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2t2n4"]
Dec 05 11:34:25 crc kubenswrapper[4728]: I1205 11:34:25.582931 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0"
Dec 05 11:34:25 crc kubenswrapper[4728]: I1205 11:34:25.702029 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 11:34:25 crc kubenswrapper[4728]: I1205 11:34:25.702081 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 11:34:26 crc kubenswrapper[4728]: I1205 11:34:26.539867 4728 generic.go:334] "Generic (PLEG): container finished" podID="1885fbe0-6acc-484b-948e-0511fbee36e2" containerID="72180cb661b6d65396e04eb1647b1ef0353e79aab9e0021c9b947d398323c5ed" exitCode=0
Dec 05 11:34:26 crc kubenswrapper[4728]: I1205 11:34:26.539969 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2t2n4" event={"ID":"1885fbe0-6acc-484b-948e-0511fbee36e2","Type":"ContainerDied","Data":"72180cb661b6d65396e04eb1647b1ef0353e79aab9e0021c9b947d398323c5ed"}
Dec 05 11:34:26 crc kubenswrapper[4728]: I1205 11:34:26.540723 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2t2n4" event={"ID":"1885fbe0-6acc-484b-948e-0511fbee36e2","Type":"ContainerStarted","Data":"cfccd6905b90981d73e55487f366d891330bdd4c90b450bf654a064515d62385"}
Dec 05 11:34:30 crc kubenswrapper[4728]: I1205 11:34:30.573974 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2t2n4" event={"ID":"1885fbe0-6acc-484b-948e-0511fbee36e2","Type":"ContainerStarted","Data":"40be26d95bef7fbb252fe6f11e2e2778572f0557f26d39319e55f7aac177262b"}
Dec 05 11:34:31 crc kubenswrapper[4728]: I1205 11:34:31.588932 4728 generic.go:334] "Generic (PLEG): container finished" podID="1885fbe0-6acc-484b-948e-0511fbee36e2" containerID="40be26d95bef7fbb252fe6f11e2e2778572f0557f26d39319e55f7aac177262b" exitCode=0
Dec 05 11:34:31 crc kubenswrapper[4728]: I1205 11:34:31.589005 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2t2n4" event={"ID":"1885fbe0-6acc-484b-948e-0511fbee36e2","Type":"ContainerDied","Data":"40be26d95bef7fbb252fe6f11e2e2778572f0557f26d39319e55f7aac177262b"}
Dec 05 11:34:32 crc kubenswrapper[4728]: I1205 11:34:32.606423 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2t2n4" event={"ID":"1885fbe0-6acc-484b-948e-0511fbee36e2","Type":"ContainerStarted","Data":"909f06638f256e1e2c4ffba500ead70ce57c0399906d2de7492d5616bc28ffcb"}
Dec 05 11:34:32 crc kubenswrapper[4728]: I1205 11:34:32.628360 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-2t2n4" podStartSLOduration=3.213134254 podStartE2EDuration="8.628333542s" podCreationTimestamp="2025-12-05 11:34:24 +0000 UTC" firstStartedPulling="2025-12-05 11:34:26.541653824 +0000 UTC m=+1600.683776517" lastFinishedPulling="2025-12-05 11:34:31.956853102 +0000 UTC m=+1606.098975805" observedRunningTime="2025-12-05 11:34:32.624735351 +0000 UTC m=+1606.766858084" watchObservedRunningTime="2025-12-05 11:34:32.628333542 +0000 UTC m=+1606.770456245"
Dec 05 11:34:34 crc kubenswrapper[4728]: I1205 11:34:34.626844 4728 generic.go:334] "Generic (PLEG): container finished" podID="12c3ce02-598e-48b2-b81c-7f80d3589de4" containerID="a45dd77747b20323ea75d5808752893113b1a5638d035d12f50092207f622e70" exitCode=0
Dec 05 11:34:34 crc kubenswrapper[4728]: I1205 11:34:34.626928 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8bbzt" event={"ID":"12c3ce02-598e-48b2-b81c-7f80d3589de4","Type":"ContainerDied","Data":"a45dd77747b20323ea75d5808752893113b1a5638d035d12f50092207f622e70"}
Dec 05 11:34:34 crc kubenswrapper[4728]: I1205 11:34:34.948020 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-2t2n4"
Dec 05 11:34:34 crc kubenswrapper[4728]: I1205 11:34:34.948408 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-2t2n4"
Dec 05 11:34:34 crc kubenswrapper[4728]: I1205 11:34:34.993784 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-2t2n4"
Dec 05 11:34:36 crc kubenswrapper[4728]: I1205 11:34:36.095628 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8bbzt" Dec 05 11:34:36 crc kubenswrapper[4728]: I1205 11:34:36.185513 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12c3ce02-598e-48b2-b81c-7f80d3589de4-repo-setup-combined-ca-bundle\") pod \"12c3ce02-598e-48b2-b81c-7f80d3589de4\" (UID: \"12c3ce02-598e-48b2-b81c-7f80d3589de4\") " Dec 05 11:34:36 crc kubenswrapper[4728]: I1205 11:34:36.185561 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/12c3ce02-598e-48b2-b81c-7f80d3589de4-ssh-key\") pod \"12c3ce02-598e-48b2-b81c-7f80d3589de4\" (UID: \"12c3ce02-598e-48b2-b81c-7f80d3589de4\") " Dec 05 11:34:36 crc kubenswrapper[4728]: I1205 11:34:36.185709 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8lqch\" (UniqueName: \"kubernetes.io/projected/12c3ce02-598e-48b2-b81c-7f80d3589de4-kube-api-access-8lqch\") pod \"12c3ce02-598e-48b2-b81c-7f80d3589de4\" (UID: \"12c3ce02-598e-48b2-b81c-7f80d3589de4\") " Dec 05 11:34:36 crc kubenswrapper[4728]: I1205 11:34:36.185780 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/12c3ce02-598e-48b2-b81c-7f80d3589de4-inventory\") pod \"12c3ce02-598e-48b2-b81c-7f80d3589de4\" (UID: \"12c3ce02-598e-48b2-b81c-7f80d3589de4\") " Dec 05 11:34:36 crc kubenswrapper[4728]: I1205 11:34:36.193013 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/12c3ce02-598e-48b2-b81c-7f80d3589de4-kube-api-access-8lqch" (OuterVolumeSpecName: "kube-api-access-8lqch") pod "12c3ce02-598e-48b2-b81c-7f80d3589de4" (UID: "12c3ce02-598e-48b2-b81c-7f80d3589de4"). InnerVolumeSpecName "kube-api-access-8lqch". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:34:36 crc kubenswrapper[4728]: I1205 11:34:36.193978 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/12c3ce02-598e-48b2-b81c-7f80d3589de4-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "12c3ce02-598e-48b2-b81c-7f80d3589de4" (UID: "12c3ce02-598e-48b2-b81c-7f80d3589de4"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:34:36 crc kubenswrapper[4728]: I1205 11:34:36.217041 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/12c3ce02-598e-48b2-b81c-7f80d3589de4-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "12c3ce02-598e-48b2-b81c-7f80d3589de4" (UID: "12c3ce02-598e-48b2-b81c-7f80d3589de4"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:34:36 crc kubenswrapper[4728]: I1205 11:34:36.224658 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/12c3ce02-598e-48b2-b81c-7f80d3589de4-inventory" (OuterVolumeSpecName: "inventory") pod "12c3ce02-598e-48b2-b81c-7f80d3589de4" (UID: "12c3ce02-598e-48b2-b81c-7f80d3589de4"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:34:36 crc kubenswrapper[4728]: I1205 11:34:36.289235 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8lqch\" (UniqueName: \"kubernetes.io/projected/12c3ce02-598e-48b2-b81c-7f80d3589de4-kube-api-access-8lqch\") on node \"crc\" DevicePath \"\"" Dec 05 11:34:36 crc kubenswrapper[4728]: I1205 11:34:36.289283 4728 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/12c3ce02-598e-48b2-b81c-7f80d3589de4-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 11:34:36 crc kubenswrapper[4728]: I1205 11:34:36.289297 4728 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12c3ce02-598e-48b2-b81c-7f80d3589de4-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:34:36 crc kubenswrapper[4728]: I1205 11:34:36.289311 4728 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/12c3ce02-598e-48b2-b81c-7f80d3589de4-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 11:34:36 crc kubenswrapper[4728]: I1205 11:34:36.401033 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-9sklm"] Dec 05 11:34:36 crc kubenswrapper[4728]: E1205 11:34:36.402631 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12c3ce02-598e-48b2-b81c-7f80d3589de4" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 05 11:34:36 crc kubenswrapper[4728]: I1205 11:34:36.402850 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="12c3ce02-598e-48b2-b81c-7f80d3589de4" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 05 11:34:36 crc kubenswrapper[4728]: I1205 11:34:36.403495 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="12c3ce02-598e-48b2-b81c-7f80d3589de4" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 05 11:34:36 crc kubenswrapper[4728]: I1205 11:34:36.411028 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9sklm" Dec 05 11:34:36 crc kubenswrapper[4728]: I1205 11:34:36.414103 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-9sklm"] Dec 05 11:34:36 crc kubenswrapper[4728]: I1205 11:34:36.492623 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4f2ed4d0-156e-4dca-ad81-dc56dbb3b8c2-utilities\") pod \"redhat-marketplace-9sklm\" (UID: \"4f2ed4d0-156e-4dca-ad81-dc56dbb3b8c2\") " pod="openshift-marketplace/redhat-marketplace-9sklm" Dec 05 11:34:36 crc kubenswrapper[4728]: I1205 11:34:36.493269 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jwcmt\" (UniqueName: \"kubernetes.io/projected/4f2ed4d0-156e-4dca-ad81-dc56dbb3b8c2-kube-api-access-jwcmt\") pod \"redhat-marketplace-9sklm\" (UID: \"4f2ed4d0-156e-4dca-ad81-dc56dbb3b8c2\") " pod="openshift-marketplace/redhat-marketplace-9sklm" Dec 05 11:34:36 crc kubenswrapper[4728]: I1205 11:34:36.493324 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4f2ed4d0-156e-4dca-ad81-dc56dbb3b8c2-catalog-content\") pod \"redhat-marketplace-9sklm\" (UID: \"4f2ed4d0-156e-4dca-ad81-dc56dbb3b8c2\") " pod="openshift-marketplace/redhat-marketplace-9sklm" Dec 05 11:34:36 crc kubenswrapper[4728]: I1205 11:34:36.595437 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jwcmt\" (UniqueName: \"kubernetes.io/projected/4f2ed4d0-156e-4dca-ad81-dc56dbb3b8c2-kube-api-access-jwcmt\") pod \"redhat-marketplace-9sklm\" (UID: \"4f2ed4d0-156e-4dca-ad81-dc56dbb3b8c2\") " pod="openshift-marketplace/redhat-marketplace-9sklm" Dec 05 11:34:36 crc kubenswrapper[4728]: I1205 11:34:36.595508 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4f2ed4d0-156e-4dca-ad81-dc56dbb3b8c2-catalog-content\") pod \"redhat-marketplace-9sklm\" (UID: \"4f2ed4d0-156e-4dca-ad81-dc56dbb3b8c2\") " pod="openshift-marketplace/redhat-marketplace-9sklm" Dec 05 11:34:36 crc kubenswrapper[4728]: I1205 11:34:36.595542 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4f2ed4d0-156e-4dca-ad81-dc56dbb3b8c2-utilities\") pod \"redhat-marketplace-9sklm\" (UID: \"4f2ed4d0-156e-4dca-ad81-dc56dbb3b8c2\") " pod="openshift-marketplace/redhat-marketplace-9sklm" Dec 05 11:34:36 crc kubenswrapper[4728]: I1205 11:34:36.596255 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4f2ed4d0-156e-4dca-ad81-dc56dbb3b8c2-utilities\") pod \"redhat-marketplace-9sklm\" (UID: \"4f2ed4d0-156e-4dca-ad81-dc56dbb3b8c2\") " pod="openshift-marketplace/redhat-marketplace-9sklm" Dec 05 11:34:36 crc kubenswrapper[4728]: I1205 11:34:36.596308 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4f2ed4d0-156e-4dca-ad81-dc56dbb3b8c2-catalog-content\") pod \"redhat-marketplace-9sklm\" (UID: \"4f2ed4d0-156e-4dca-ad81-dc56dbb3b8c2\") " pod="openshift-marketplace/redhat-marketplace-9sklm" Dec 05 11:34:36 crc kubenswrapper[4728]: I1205 11:34:36.617552 4728 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-jwcmt\" (UniqueName: \"kubernetes.io/projected/4f2ed4d0-156e-4dca-ad81-dc56dbb3b8c2-kube-api-access-jwcmt\") pod \"redhat-marketplace-9sklm\" (UID: \"4f2ed4d0-156e-4dca-ad81-dc56dbb3b8c2\") " pod="openshift-marketplace/redhat-marketplace-9sklm" Dec 05 11:34:36 crc kubenswrapper[4728]: I1205 11:34:36.650374 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8bbzt" Dec 05 11:34:36 crc kubenswrapper[4728]: I1205 11:34:36.650929 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8bbzt" event={"ID":"12c3ce02-598e-48b2-b81c-7f80d3589de4","Type":"ContainerDied","Data":"06de0645fc44a77dcf0cbf1011aefd85268b631865fe29272c21a553ea8af761"} Dec 05 11:34:36 crc kubenswrapper[4728]: I1205 11:34:36.650957 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="06de0645fc44a77dcf0cbf1011aefd85268b631865fe29272c21a553ea8af761" Dec 05 11:34:36 crc kubenswrapper[4728]: I1205 11:34:36.731457 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9sklm" Dec 05 11:34:36 crc kubenswrapper[4728]: I1205 11:34:36.740070 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-m6vb7"] Dec 05 11:34:36 crc kubenswrapper[4728]: I1205 11:34:36.741672 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-m6vb7" Dec 05 11:34:36 crc kubenswrapper[4728]: I1205 11:34:36.744753 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b4kzh" Dec 05 11:34:36 crc kubenswrapper[4728]: I1205 11:34:36.744849 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 11:34:36 crc kubenswrapper[4728]: I1205 11:34:36.745005 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 11:34:36 crc kubenswrapper[4728]: I1205 11:34:36.745054 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 11:34:36 crc kubenswrapper[4728]: I1205 11:34:36.765902 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-m6vb7"] Dec 05 11:34:36 crc kubenswrapper[4728]: I1205 11:34:36.800669 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a97a24e6-7ec7-48ea-8dcb-bc6c72a64f67-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-m6vb7\" (UID: \"a97a24e6-7ec7-48ea-8dcb-bc6c72a64f67\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-m6vb7" Dec 05 11:34:36 crc kubenswrapper[4728]: I1205 11:34:36.800713 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a97a24e6-7ec7-48ea-8dcb-bc6c72a64f67-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-m6vb7\" (UID: \"a97a24e6-7ec7-48ea-8dcb-bc6c72a64f67\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-m6vb7" Dec 05 11:34:36 crc kubenswrapper[4728]: I1205 11:34:36.800841 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"kube-api-access-kglvb\" (UniqueName: \"kubernetes.io/projected/a97a24e6-7ec7-48ea-8dcb-bc6c72a64f67-kube-api-access-kglvb\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-m6vb7\" (UID: \"a97a24e6-7ec7-48ea-8dcb-bc6c72a64f67\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-m6vb7" Dec 05 11:34:36 crc kubenswrapper[4728]: I1205 11:34:36.903923 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kglvb\" (UniqueName: \"kubernetes.io/projected/a97a24e6-7ec7-48ea-8dcb-bc6c72a64f67-kube-api-access-kglvb\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-m6vb7\" (UID: \"a97a24e6-7ec7-48ea-8dcb-bc6c72a64f67\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-m6vb7" Dec 05 11:34:36 crc kubenswrapper[4728]: I1205 11:34:36.904082 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a97a24e6-7ec7-48ea-8dcb-bc6c72a64f67-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-m6vb7\" (UID: \"a97a24e6-7ec7-48ea-8dcb-bc6c72a64f67\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-m6vb7" Dec 05 11:34:36 crc kubenswrapper[4728]: I1205 11:34:36.904114 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a97a24e6-7ec7-48ea-8dcb-bc6c72a64f67-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-m6vb7\" (UID: \"a97a24e6-7ec7-48ea-8dcb-bc6c72a64f67\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-m6vb7" Dec 05 11:34:36 crc kubenswrapper[4728]: I1205 11:34:36.911682 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a97a24e6-7ec7-48ea-8dcb-bc6c72a64f67-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-m6vb7\" (UID: \"a97a24e6-7ec7-48ea-8dcb-bc6c72a64f67\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-m6vb7" Dec 05 11:34:36 crc kubenswrapper[4728]: I1205 11:34:36.915026 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a97a24e6-7ec7-48ea-8dcb-bc6c72a64f67-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-m6vb7\" (UID: \"a97a24e6-7ec7-48ea-8dcb-bc6c72a64f67\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-m6vb7" Dec 05 11:34:36 crc kubenswrapper[4728]: I1205 11:34:36.931596 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kglvb\" (UniqueName: \"kubernetes.io/projected/a97a24e6-7ec7-48ea-8dcb-bc6c72a64f67-kube-api-access-kglvb\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-m6vb7\" (UID: \"a97a24e6-7ec7-48ea-8dcb-bc6c72a64f67\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-m6vb7" Dec 05 11:34:37 crc kubenswrapper[4728]: I1205 11:34:37.069978 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-m6vb7" Dec 05 11:34:37 crc kubenswrapper[4728]: W1205 11:34:37.243203 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4f2ed4d0_156e_4dca_ad81_dc56dbb3b8c2.slice/crio-8c66b10eb0f21e6fd645e4d9dd1bc7930ef4ac1b213143c6d5249acfd9b996da WatchSource:0}: Error finding container 8c66b10eb0f21e6fd645e4d9dd1bc7930ef4ac1b213143c6d5249acfd9b996da: Status 404 returned error can't find the container with id 8c66b10eb0f21e6fd645e4d9dd1bc7930ef4ac1b213143c6d5249acfd9b996da Dec 05 11:34:37 crc kubenswrapper[4728]: I1205 11:34:37.253504 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-9sklm"] Dec 05 11:34:37 crc kubenswrapper[4728]: I1205 11:34:37.624813 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-m6vb7"] Dec 05 11:34:37 crc kubenswrapper[4728]: W1205 11:34:37.629158 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda97a24e6_7ec7_48ea_8dcb_bc6c72a64f67.slice/crio-b2305100de034081bcaa2b3bc33cedd319c11045be57ea4d1cc1cfa1d0f90bd9 WatchSource:0}: Error finding container b2305100de034081bcaa2b3bc33cedd319c11045be57ea4d1cc1cfa1d0f90bd9: Status 404 returned error can't find the container with id b2305100de034081bcaa2b3bc33cedd319c11045be57ea4d1cc1cfa1d0f90bd9 Dec 05 11:34:37 crc kubenswrapper[4728]: I1205 11:34:37.662501 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-m6vb7" event={"ID":"a97a24e6-7ec7-48ea-8dcb-bc6c72a64f67","Type":"ContainerStarted","Data":"b2305100de034081bcaa2b3bc33cedd319c11045be57ea4d1cc1cfa1d0f90bd9"} Dec 05 11:34:37 crc kubenswrapper[4728]: I1205 11:34:37.664086 4728 generic.go:334] "Generic (PLEG): container finished" podID="4f2ed4d0-156e-4dca-ad81-dc56dbb3b8c2" containerID="253499e0d23bf425d30b67c1a39f603b06f3c1936d847859f50de90eb1e89cda" exitCode=0 Dec 05 11:34:37 crc kubenswrapper[4728]: I1205 11:34:37.664114 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9sklm" event={"ID":"4f2ed4d0-156e-4dca-ad81-dc56dbb3b8c2","Type":"ContainerDied","Data":"253499e0d23bf425d30b67c1a39f603b06f3c1936d847859f50de90eb1e89cda"} Dec 05 11:34:37 crc kubenswrapper[4728]: I1205 11:34:37.664132 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9sklm" event={"ID":"4f2ed4d0-156e-4dca-ad81-dc56dbb3b8c2","Type":"ContainerStarted","Data":"8c66b10eb0f21e6fd645e4d9dd1bc7930ef4ac1b213143c6d5249acfd9b996da"} Dec 05 11:34:38 crc kubenswrapper[4728]: I1205 11:34:38.679282 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-m6vb7" event={"ID":"a97a24e6-7ec7-48ea-8dcb-bc6c72a64f67","Type":"ContainerStarted","Data":"d88c8e7017280318a9cdd9ea401c11d2a8292feaaaab46de055cfed6a3976b8b"} Dec 05 11:34:38 crc kubenswrapper[4728]: I1205 11:34:38.711898 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-m6vb7" podStartSLOduration=2.344850277 podStartE2EDuration="2.711875521s" podCreationTimestamp="2025-12-05 11:34:36 +0000 UTC" firstStartedPulling="2025-12-05 11:34:37.632637506 +0000 UTC m=+1611.774760189" lastFinishedPulling="2025-12-05 11:34:37.99966273 +0000 
UTC m=+1612.141785433" observedRunningTime="2025-12-05 11:34:38.696945403 +0000 UTC m=+1612.839068116" watchObservedRunningTime="2025-12-05 11:34:38.711875521 +0000 UTC m=+1612.853998224" Dec 05 11:34:42 crc kubenswrapper[4728]: I1205 11:34:42.721377 4728 generic.go:334] "Generic (PLEG): container finished" podID="4f2ed4d0-156e-4dca-ad81-dc56dbb3b8c2" containerID="19c6f10cd049e8f51bdb5d0590bed9284fa957f25a0d8879c3064bc0094f53a8" exitCode=0 Dec 05 11:34:42 crc kubenswrapper[4728]: I1205 11:34:42.721531 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9sklm" event={"ID":"4f2ed4d0-156e-4dca-ad81-dc56dbb3b8c2","Type":"ContainerDied","Data":"19c6f10cd049e8f51bdb5d0590bed9284fa957f25a0d8879c3064bc0094f53a8"} Dec 05 11:34:42 crc kubenswrapper[4728]: I1205 11:34:42.723614 4728 generic.go:334] "Generic (PLEG): container finished" podID="a97a24e6-7ec7-48ea-8dcb-bc6c72a64f67" containerID="d88c8e7017280318a9cdd9ea401c11d2a8292feaaaab46de055cfed6a3976b8b" exitCode=0 Dec 05 11:34:42 crc kubenswrapper[4728]: I1205 11:34:42.723667 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-m6vb7" event={"ID":"a97a24e6-7ec7-48ea-8dcb-bc6c72a64f67","Type":"ContainerDied","Data":"d88c8e7017280318a9cdd9ea401c11d2a8292feaaaab46de055cfed6a3976b8b"} Dec 05 11:34:43 crc kubenswrapper[4728]: I1205 11:34:43.733899 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9sklm" event={"ID":"4f2ed4d0-156e-4dca-ad81-dc56dbb3b8c2","Type":"ContainerStarted","Data":"26de113933dbe7d2ae28e204d17d35f906750080573e9a5c429ea9b0407ed212"} Dec 05 11:34:43 crc kubenswrapper[4728]: I1205 11:34:43.757059 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-9sklm" podStartSLOduration=2.3129399299999998 podStartE2EDuration="7.757041141s" podCreationTimestamp="2025-12-05 11:34:36 +0000 UTC" firstStartedPulling="2025-12-05 11:34:37.665639573 +0000 UTC m=+1611.807762266" lastFinishedPulling="2025-12-05 11:34:43.109740764 +0000 UTC m=+1617.251863477" observedRunningTime="2025-12-05 11:34:43.747835957 +0000 UTC m=+1617.889958670" watchObservedRunningTime="2025-12-05 11:34:43.757041141 +0000 UTC m=+1617.899163824" Dec 05 11:34:44 crc kubenswrapper[4728]: I1205 11:34:44.210323 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-m6vb7" Dec 05 11:34:44 crc kubenswrapper[4728]: I1205 11:34:44.275193 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a97a24e6-7ec7-48ea-8dcb-bc6c72a64f67-inventory\") pod \"a97a24e6-7ec7-48ea-8dcb-bc6c72a64f67\" (UID: \"a97a24e6-7ec7-48ea-8dcb-bc6c72a64f67\") " Dec 05 11:34:44 crc kubenswrapper[4728]: I1205 11:34:44.275539 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kglvb\" (UniqueName: \"kubernetes.io/projected/a97a24e6-7ec7-48ea-8dcb-bc6c72a64f67-kube-api-access-kglvb\") pod \"a97a24e6-7ec7-48ea-8dcb-bc6c72a64f67\" (UID: \"a97a24e6-7ec7-48ea-8dcb-bc6c72a64f67\") " Dec 05 11:34:44 crc kubenswrapper[4728]: I1205 11:34:44.275673 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a97a24e6-7ec7-48ea-8dcb-bc6c72a64f67-ssh-key\") pod \"a97a24e6-7ec7-48ea-8dcb-bc6c72a64f67\" (UID: \"a97a24e6-7ec7-48ea-8dcb-bc6c72a64f67\") " Dec 05 11:34:44 crc kubenswrapper[4728]: I1205 11:34:44.287436 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a97a24e6-7ec7-48ea-8dcb-bc6c72a64f67-kube-api-access-kglvb" (OuterVolumeSpecName: "kube-api-access-kglvb") pod "a97a24e6-7ec7-48ea-8dcb-bc6c72a64f67" (UID: "a97a24e6-7ec7-48ea-8dcb-bc6c72a64f67"). InnerVolumeSpecName "kube-api-access-kglvb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:34:44 crc kubenswrapper[4728]: I1205 11:34:44.310006 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a97a24e6-7ec7-48ea-8dcb-bc6c72a64f67-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a97a24e6-7ec7-48ea-8dcb-bc6c72a64f67" (UID: "a97a24e6-7ec7-48ea-8dcb-bc6c72a64f67"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:34:44 crc kubenswrapper[4728]: I1205 11:34:44.340750 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a97a24e6-7ec7-48ea-8dcb-bc6c72a64f67-inventory" (OuterVolumeSpecName: "inventory") pod "a97a24e6-7ec7-48ea-8dcb-bc6c72a64f67" (UID: "a97a24e6-7ec7-48ea-8dcb-bc6c72a64f67"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:34:44 crc kubenswrapper[4728]: I1205 11:34:44.377486 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kglvb\" (UniqueName: \"kubernetes.io/projected/a97a24e6-7ec7-48ea-8dcb-bc6c72a64f67-kube-api-access-kglvb\") on node \"crc\" DevicePath \"\"" Dec 05 11:34:44 crc kubenswrapper[4728]: I1205 11:34:44.377572 4728 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a97a24e6-7ec7-48ea-8dcb-bc6c72a64f67-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 11:34:44 crc kubenswrapper[4728]: I1205 11:34:44.377584 4728 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a97a24e6-7ec7-48ea-8dcb-bc6c72a64f67-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 11:34:44 crc kubenswrapper[4728]: E1205 11:34:44.521140 4728 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda97a24e6_7ec7_48ea_8dcb_bc6c72a64f67.slice\": RecentStats: unable to find data in memory cache]" Dec 05 11:34:44 crc kubenswrapper[4728]: I1205 11:34:44.747902 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-m6vb7" Dec 05 11:34:44 crc kubenswrapper[4728]: I1205 11:34:44.747902 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-m6vb7" event={"ID":"a97a24e6-7ec7-48ea-8dcb-bc6c72a64f67","Type":"ContainerDied","Data":"b2305100de034081bcaa2b3bc33cedd319c11045be57ea4d1cc1cfa1d0f90bd9"} Dec 05 11:34:44 crc kubenswrapper[4728]: I1205 11:34:44.748050 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b2305100de034081bcaa2b3bc33cedd319c11045be57ea4d1cc1cfa1d0f90bd9" Dec 05 11:34:44 crc kubenswrapper[4728]: I1205 11:34:44.850810 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-rjm82"] Dec 05 11:34:44 crc kubenswrapper[4728]: E1205 11:34:44.851607 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a97a24e6-7ec7-48ea-8dcb-bc6c72a64f67" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 05 11:34:44 crc kubenswrapper[4728]: I1205 11:34:44.851637 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="a97a24e6-7ec7-48ea-8dcb-bc6c72a64f67" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 05 11:34:44 crc kubenswrapper[4728]: I1205 11:34:44.851910 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="a97a24e6-7ec7-48ea-8dcb-bc6c72a64f67" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 05 11:34:44 crc kubenswrapper[4728]: I1205 11:34:44.852685 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-rjm82" Dec 05 11:34:44 crc kubenswrapper[4728]: I1205 11:34:44.854624 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 11:34:44 crc kubenswrapper[4728]: I1205 11:34:44.855083 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 11:34:44 crc kubenswrapper[4728]: I1205 11:34:44.855581 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 11:34:44 crc kubenswrapper[4728]: I1205 11:34:44.856083 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b4kzh" Dec 05 11:34:44 crc kubenswrapper[4728]: I1205 11:34:44.867676 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-rjm82"] Dec 05 11:34:44 crc kubenswrapper[4728]: I1205 11:34:44.991359 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-rjm82\" (UID: \"cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-rjm82" Dec 05 11:34:44 crc kubenswrapper[4728]: I1205 11:34:44.991431 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lwppl\" (UniqueName: \"kubernetes.io/projected/cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9-kube-api-access-lwppl\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-rjm82\" (UID: \"cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-rjm82" Dec 05 11:34:44 crc kubenswrapper[4728]: I1205 11:34:44.991472 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-rjm82\" (UID: \"cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-rjm82" Dec 05 11:34:44 crc kubenswrapper[4728]: I1205 11:34:44.991737 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-rjm82\" (UID: \"cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-rjm82" Dec 05 11:34:45 crc kubenswrapper[4728]: I1205 11:34:45.007718 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-2t2n4" Dec 05 11:34:45 crc kubenswrapper[4728]: I1205 11:34:45.069239 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2t2n4"] Dec 05 11:34:45 crc kubenswrapper[4728]: I1205 11:34:45.093175 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lwppl\" (UniqueName: \"kubernetes.io/projected/cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9-kube-api-access-lwppl\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-rjm82\" (UID: 
\"cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-rjm82" Dec 05 11:34:45 crc kubenswrapper[4728]: I1205 11:34:45.093241 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-rjm82\" (UID: \"cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-rjm82" Dec 05 11:34:45 crc kubenswrapper[4728]: I1205 11:34:45.093322 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-rjm82\" (UID: \"cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-rjm82" Dec 05 11:34:45 crc kubenswrapper[4728]: I1205 11:34:45.093397 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-rjm82\" (UID: \"cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-rjm82" Dec 05 11:34:45 crc kubenswrapper[4728]: I1205 11:34:45.098053 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-rjm82\" (UID: \"cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-rjm82" Dec 05 11:34:45 crc kubenswrapper[4728]: I1205 11:34:45.098086 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-rjm82\" (UID: \"cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-rjm82" Dec 05 11:34:45 crc kubenswrapper[4728]: I1205 11:34:45.098053 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-rjm82\" (UID: \"cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-rjm82" Dec 05 11:34:45 crc kubenswrapper[4728]: I1205 11:34:45.108515 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lwppl\" (UniqueName: \"kubernetes.io/projected/cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9-kube-api-access-lwppl\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-rjm82\" (UID: \"cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-rjm82" Dec 05 11:34:45 crc kubenswrapper[4728]: I1205 11:34:45.185953 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-rjm82" Dec 05 11:34:45 crc kubenswrapper[4728]: I1205 11:34:45.757727 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-2t2n4" podUID="1885fbe0-6acc-484b-948e-0511fbee36e2" containerName="registry-server" containerID="cri-o://909f06638f256e1e2c4ffba500ead70ce57c0399906d2de7492d5616bc28ffcb" gracePeriod=2 Dec 05 11:34:45 crc kubenswrapper[4728]: I1205 11:34:45.776536 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-rjm82"] Dec 05 11:34:45 crc kubenswrapper[4728]: W1205 11:34:45.778270 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcf81cfab_dfe5_4ebb_87aa_ff462cd3d1f9.slice/crio-d7327e5f3bf9841bac6280953f5dd70e3552f43749913ca56a65f8f3f83e78fa WatchSource:0}: Error finding container d7327e5f3bf9841bac6280953f5dd70e3552f43749913ca56a65f8f3f83e78fa: Status 404 returned error can't find the container with id d7327e5f3bf9841bac6280953f5dd70e3552f43749913ca56a65f8f3f83e78fa Dec 05 11:34:46 crc kubenswrapper[4728]: I1205 11:34:46.325813 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2t2n4" Dec 05 11:34:46 crc kubenswrapper[4728]: I1205 11:34:46.425579 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jk2fk\" (UniqueName: \"kubernetes.io/projected/1885fbe0-6acc-484b-948e-0511fbee36e2-kube-api-access-jk2fk\") pod \"1885fbe0-6acc-484b-948e-0511fbee36e2\" (UID: \"1885fbe0-6acc-484b-948e-0511fbee36e2\") " Dec 05 11:34:46 crc kubenswrapper[4728]: I1205 11:34:46.425706 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1885fbe0-6acc-484b-948e-0511fbee36e2-utilities\") pod \"1885fbe0-6acc-484b-948e-0511fbee36e2\" (UID: \"1885fbe0-6acc-484b-948e-0511fbee36e2\") " Dec 05 11:34:46 crc kubenswrapper[4728]: I1205 11:34:46.425764 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1885fbe0-6acc-484b-948e-0511fbee36e2-catalog-content\") pod \"1885fbe0-6acc-484b-948e-0511fbee36e2\" (UID: \"1885fbe0-6acc-484b-948e-0511fbee36e2\") " Dec 05 11:34:46 crc kubenswrapper[4728]: I1205 11:34:46.426429 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1885fbe0-6acc-484b-948e-0511fbee36e2-utilities" (OuterVolumeSpecName: "utilities") pod "1885fbe0-6acc-484b-948e-0511fbee36e2" (UID: "1885fbe0-6acc-484b-948e-0511fbee36e2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:34:46 crc kubenswrapper[4728]: I1205 11:34:46.434068 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1885fbe0-6acc-484b-948e-0511fbee36e2-kube-api-access-jk2fk" (OuterVolumeSpecName: "kube-api-access-jk2fk") pod "1885fbe0-6acc-484b-948e-0511fbee36e2" (UID: "1885fbe0-6acc-484b-948e-0511fbee36e2"). InnerVolumeSpecName "kube-api-access-jk2fk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:34:46 crc kubenswrapper[4728]: I1205 11:34:46.470826 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1885fbe0-6acc-484b-948e-0511fbee36e2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1885fbe0-6acc-484b-948e-0511fbee36e2" (UID: "1885fbe0-6acc-484b-948e-0511fbee36e2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:34:46 crc kubenswrapper[4728]: I1205 11:34:46.528647 4728 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1885fbe0-6acc-484b-948e-0511fbee36e2-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 11:34:46 crc kubenswrapper[4728]: I1205 11:34:46.528682 4728 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1885fbe0-6acc-484b-948e-0511fbee36e2-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 11:34:46 crc kubenswrapper[4728]: I1205 11:34:46.528698 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jk2fk\" (UniqueName: \"kubernetes.io/projected/1885fbe0-6acc-484b-948e-0511fbee36e2-kube-api-access-jk2fk\") on node \"crc\" DevicePath \"\"" Dec 05 11:34:46 crc kubenswrapper[4728]: I1205 11:34:46.732549 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-9sklm" Dec 05 11:34:46 crc kubenswrapper[4728]: I1205 11:34:46.733003 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-9sklm" Dec 05 11:34:46 crc kubenswrapper[4728]: I1205 11:34:46.780846 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-rjm82" event={"ID":"cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9","Type":"ContainerStarted","Data":"d7327e5f3bf9841bac6280953f5dd70e3552f43749913ca56a65f8f3f83e78fa"} Dec 05 11:34:46 crc kubenswrapper[4728]: I1205 11:34:46.783963 4728 generic.go:334] "Generic (PLEG): container finished" podID="1885fbe0-6acc-484b-948e-0511fbee36e2" containerID="909f06638f256e1e2c4ffba500ead70ce57c0399906d2de7492d5616bc28ffcb" exitCode=0 Dec 05 11:34:46 crc kubenswrapper[4728]: I1205 11:34:46.784042 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2t2n4" event={"ID":"1885fbe0-6acc-484b-948e-0511fbee36e2","Type":"ContainerDied","Data":"909f06638f256e1e2c4ffba500ead70ce57c0399906d2de7492d5616bc28ffcb"} Dec 05 11:34:46 crc kubenswrapper[4728]: I1205 11:34:46.784050 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-2t2n4" Dec 05 11:34:46 crc kubenswrapper[4728]: I1205 11:34:46.784103 4728 scope.go:117] "RemoveContainer" containerID="909f06638f256e1e2c4ffba500ead70ce57c0399906d2de7492d5616bc28ffcb" Dec 05 11:34:46 crc kubenswrapper[4728]: I1205 11:34:46.784085 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2t2n4" event={"ID":"1885fbe0-6acc-484b-948e-0511fbee36e2","Type":"ContainerDied","Data":"cfccd6905b90981d73e55487f366d891330bdd4c90b450bf654a064515d62385"} Dec 05 11:34:46 crc kubenswrapper[4728]: I1205 11:34:46.797411 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-9sklm" Dec 05 11:34:46 crc kubenswrapper[4728]: I1205 11:34:46.820003 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2t2n4"] Dec 05 11:34:46 crc kubenswrapper[4728]: I1205 11:34:46.835042 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-2t2n4"] Dec 05 11:34:46 crc kubenswrapper[4728]: I1205 11:34:46.837475 4728 scope.go:117] "RemoveContainer" containerID="40be26d95bef7fbb252fe6f11e2e2778572f0557f26d39319e55f7aac177262b" Dec 05 11:34:46 crc kubenswrapper[4728]: I1205 11:34:46.868580 4728 scope.go:117] "RemoveContainer" containerID="72180cb661b6d65396e04eb1647b1ef0353e79aab9e0021c9b947d398323c5ed" Dec 05 11:34:46 crc kubenswrapper[4728]: I1205 11:34:46.907374 4728 scope.go:117] "RemoveContainer" containerID="909f06638f256e1e2c4ffba500ead70ce57c0399906d2de7492d5616bc28ffcb" Dec 05 11:34:46 crc kubenswrapper[4728]: E1205 11:34:46.907996 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"909f06638f256e1e2c4ffba500ead70ce57c0399906d2de7492d5616bc28ffcb\": container with ID starting with 909f06638f256e1e2c4ffba500ead70ce57c0399906d2de7492d5616bc28ffcb not found: ID does not exist" containerID="909f06638f256e1e2c4ffba500ead70ce57c0399906d2de7492d5616bc28ffcb" Dec 05 11:34:46 crc kubenswrapper[4728]: I1205 11:34:46.908045 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"909f06638f256e1e2c4ffba500ead70ce57c0399906d2de7492d5616bc28ffcb"} err="failed to get container status \"909f06638f256e1e2c4ffba500ead70ce57c0399906d2de7492d5616bc28ffcb\": rpc error: code = NotFound desc = could not find container \"909f06638f256e1e2c4ffba500ead70ce57c0399906d2de7492d5616bc28ffcb\": container with ID starting with 909f06638f256e1e2c4ffba500ead70ce57c0399906d2de7492d5616bc28ffcb not found: ID does not exist" Dec 05 11:34:46 crc kubenswrapper[4728]: I1205 11:34:46.908074 4728 scope.go:117] "RemoveContainer" containerID="40be26d95bef7fbb252fe6f11e2e2778572f0557f26d39319e55f7aac177262b" Dec 05 11:34:46 crc kubenswrapper[4728]: E1205 11:34:46.908644 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"40be26d95bef7fbb252fe6f11e2e2778572f0557f26d39319e55f7aac177262b\": container with ID starting with 40be26d95bef7fbb252fe6f11e2e2778572f0557f26d39319e55f7aac177262b not found: ID does not exist" containerID="40be26d95bef7fbb252fe6f11e2e2778572f0557f26d39319e55f7aac177262b" Dec 05 11:34:46 crc kubenswrapper[4728]: I1205 11:34:46.908683 4728 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"40be26d95bef7fbb252fe6f11e2e2778572f0557f26d39319e55f7aac177262b"} err="failed to get container status \"40be26d95bef7fbb252fe6f11e2e2778572f0557f26d39319e55f7aac177262b\": rpc error: code = NotFound desc = could not find container \"40be26d95bef7fbb252fe6f11e2e2778572f0557f26d39319e55f7aac177262b\": container with ID starting with 40be26d95bef7fbb252fe6f11e2e2778572f0557f26d39319e55f7aac177262b not found: ID does not exist" Dec 05 11:34:46 crc kubenswrapper[4728]: I1205 11:34:46.908710 4728 scope.go:117] "RemoveContainer" containerID="72180cb661b6d65396e04eb1647b1ef0353e79aab9e0021c9b947d398323c5ed" Dec 05 11:34:46 crc kubenswrapper[4728]: E1205 11:34:46.909165 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"72180cb661b6d65396e04eb1647b1ef0353e79aab9e0021c9b947d398323c5ed\": container with ID starting with 72180cb661b6d65396e04eb1647b1ef0353e79aab9e0021c9b947d398323c5ed not found: ID does not exist" containerID="72180cb661b6d65396e04eb1647b1ef0353e79aab9e0021c9b947d398323c5ed" Dec 05 11:34:46 crc kubenswrapper[4728]: I1205 11:34:46.909198 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"72180cb661b6d65396e04eb1647b1ef0353e79aab9e0021c9b947d398323c5ed"} err="failed to get container status \"72180cb661b6d65396e04eb1647b1ef0353e79aab9e0021c9b947d398323c5ed\": rpc error: code = NotFound desc = could not find container \"72180cb661b6d65396e04eb1647b1ef0353e79aab9e0021c9b947d398323c5ed\": container with ID starting with 72180cb661b6d65396e04eb1647b1ef0353e79aab9e0021c9b947d398323c5ed not found: ID does not exist" Dec 05 11:34:47 crc kubenswrapper[4728]: I1205 11:34:47.991029 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 11:34:48 crc kubenswrapper[4728]: I1205 11:34:48.363613 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1885fbe0-6acc-484b-948e-0511fbee36e2" path="/var/lib/kubelet/pods/1885fbe0-6acc-484b-948e-0511fbee36e2/volumes" Dec 05 11:34:48 crc kubenswrapper[4728]: I1205 11:34:48.812076 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-rjm82" event={"ID":"cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9","Type":"ContainerStarted","Data":"37645fa0b7b2e610d10fac00732af45b92c551beb8627d3df53d53b2e81e4d2b"} Dec 05 11:34:48 crc kubenswrapper[4728]: I1205 11:34:48.876489 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-rjm82" podStartSLOduration=2.672698224 podStartE2EDuration="4.876458413s" podCreationTimestamp="2025-12-05 11:34:44 +0000 UTC" firstStartedPulling="2025-12-05 11:34:45.78368778 +0000 UTC m=+1619.925810493" lastFinishedPulling="2025-12-05 11:34:47.987447999 +0000 UTC m=+1622.129570682" observedRunningTime="2025-12-05 11:34:48.834471998 +0000 UTC m=+1622.976594721" watchObservedRunningTime="2025-12-05 11:34:48.876458413 +0000 UTC m=+1623.018581136" Dec 05 11:34:55 crc kubenswrapper[4728]: I1205 11:34:55.702549 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 11:34:55 crc kubenswrapper[4728]: I1205 11:34:55.703309 4728 prober.go:107] "Probe 
failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 11:34:56 crc kubenswrapper[4728]: I1205 11:34:56.792130 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-9sklm" Dec 05 11:34:56 crc kubenswrapper[4728]: I1205 11:34:56.986770 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-9sklm"] Dec 05 11:34:57 crc kubenswrapper[4728]: I1205 11:34:57.049005 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4wpqx"] Dec 05 11:34:57 crc kubenswrapper[4728]: I1205 11:34:57.049409 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-4wpqx" podUID="3566a4b3-0657-4221-9536-cfebc1b59376" containerName="registry-server" containerID="cri-o://2208171bdd56cd8ac4c91d7c590ab88c814d7c9f8b714262de119cac270a1923" gracePeriod=2 Dec 05 11:34:57 crc kubenswrapper[4728]: E1205 11:34:57.250809 4728 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2208171bdd56cd8ac4c91d7c590ab88c814d7c9f8b714262de119cac270a1923 is running failed: container process not found" containerID="2208171bdd56cd8ac4c91d7c590ab88c814d7c9f8b714262de119cac270a1923" cmd=["grpc_health_probe","-addr=:50051"] Dec 05 11:34:57 crc kubenswrapper[4728]: E1205 11:34:57.251310 4728 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2208171bdd56cd8ac4c91d7c590ab88c814d7c9f8b714262de119cac270a1923 is running failed: container process not found" containerID="2208171bdd56cd8ac4c91d7c590ab88c814d7c9f8b714262de119cac270a1923" cmd=["grpc_health_probe","-addr=:50051"] Dec 05 11:34:57 crc kubenswrapper[4728]: E1205 11:34:57.251585 4728 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2208171bdd56cd8ac4c91d7c590ab88c814d7c9f8b714262de119cac270a1923 is running failed: container process not found" containerID="2208171bdd56cd8ac4c91d7c590ab88c814d7c9f8b714262de119cac270a1923" cmd=["grpc_health_probe","-addr=:50051"] Dec 05 11:34:57 crc kubenswrapper[4728]: E1205 11:34:57.251669 4728 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2208171bdd56cd8ac4c91d7c590ab88c814d7c9f8b714262de119cac270a1923 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-marketplace-4wpqx" podUID="3566a4b3-0657-4221-9536-cfebc1b59376" containerName="registry-server" Dec 05 11:34:57 crc kubenswrapper[4728]: I1205 11:34:57.920599 4728 generic.go:334] "Generic (PLEG): container finished" podID="3566a4b3-0657-4221-9536-cfebc1b59376" containerID="2208171bdd56cd8ac4c91d7c590ab88c814d7c9f8b714262de119cac270a1923" exitCode=0 Dec 05 11:34:57 crc kubenswrapper[4728]: I1205 11:34:57.920648 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4wpqx" 
event={"ID":"3566a4b3-0657-4221-9536-cfebc1b59376","Type":"ContainerDied","Data":"2208171bdd56cd8ac4c91d7c590ab88c814d7c9f8b714262de119cac270a1923"} Dec 05 11:34:57 crc kubenswrapper[4728]: I1205 11:34:57.944752 4728 scope.go:117] "RemoveContainer" containerID="27d470dcc7679014f2b45cedab1436309956d85f523da34eb67469b51e0bc713" Dec 05 11:34:57 crc kubenswrapper[4728]: I1205 11:34:57.964739 4728 scope.go:117] "RemoveContainer" containerID="47ef8f7fa2d3a2d5c869bccc2658eed49d4bf5dc79b6d2dca95cda5443c3da52" Dec 05 11:34:58 crc kubenswrapper[4728]: I1205 11:34:58.114828 4728 scope.go:117] "RemoveContainer" containerID="2708625e0fb8f19339b47c8fd505026f6227bbeb03e13b88299420c7f5747f22" Dec 05 11:34:58 crc kubenswrapper[4728]: I1205 11:34:58.143832 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4wpqx" Dec 05 11:34:58 crc kubenswrapper[4728]: I1205 11:34:58.172881 4728 scope.go:117] "RemoveContainer" containerID="ffa3e9fa28a1594b3e7c3cf60abf37d1bddbd19b06abbe6269dbcee71500d7d9" Dec 05 11:34:58 crc kubenswrapper[4728]: I1205 11:34:58.209661 4728 scope.go:117] "RemoveContainer" containerID="ddc748730de7d3e6670967eeb8de48ec58cea8c546f23933905ed5cdf4fece03" Dec 05 11:34:58 crc kubenswrapper[4728]: I1205 11:34:58.283546 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5jmjl\" (UniqueName: \"kubernetes.io/projected/3566a4b3-0657-4221-9536-cfebc1b59376-kube-api-access-5jmjl\") pod \"3566a4b3-0657-4221-9536-cfebc1b59376\" (UID: \"3566a4b3-0657-4221-9536-cfebc1b59376\") " Dec 05 11:34:58 crc kubenswrapper[4728]: I1205 11:34:58.291018 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3566a4b3-0657-4221-9536-cfebc1b59376-utilities\") pod \"3566a4b3-0657-4221-9536-cfebc1b59376\" (UID: \"3566a4b3-0657-4221-9536-cfebc1b59376\") " Dec 05 11:34:58 crc kubenswrapper[4728]: I1205 11:34:58.291306 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3566a4b3-0657-4221-9536-cfebc1b59376-catalog-content\") pod \"3566a4b3-0657-4221-9536-cfebc1b59376\" (UID: \"3566a4b3-0657-4221-9536-cfebc1b59376\") " Dec 05 11:34:58 crc kubenswrapper[4728]: I1205 11:34:58.292241 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3566a4b3-0657-4221-9536-cfebc1b59376-utilities" (OuterVolumeSpecName: "utilities") pod "3566a4b3-0657-4221-9536-cfebc1b59376" (UID: "3566a4b3-0657-4221-9536-cfebc1b59376"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:34:58 crc kubenswrapper[4728]: I1205 11:34:58.292484 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3566a4b3-0657-4221-9536-cfebc1b59376-kube-api-access-5jmjl" (OuterVolumeSpecName: "kube-api-access-5jmjl") pod "3566a4b3-0657-4221-9536-cfebc1b59376" (UID: "3566a4b3-0657-4221-9536-cfebc1b59376"). InnerVolumeSpecName "kube-api-access-5jmjl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:34:58 crc kubenswrapper[4728]: I1205 11:34:58.293606 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5jmjl\" (UniqueName: \"kubernetes.io/projected/3566a4b3-0657-4221-9536-cfebc1b59376-kube-api-access-5jmjl\") on node \"crc\" DevicePath \"\"" Dec 05 11:34:58 crc kubenswrapper[4728]: I1205 11:34:58.293631 4728 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3566a4b3-0657-4221-9536-cfebc1b59376-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 11:34:58 crc kubenswrapper[4728]: I1205 11:34:58.315383 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3566a4b3-0657-4221-9536-cfebc1b59376-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3566a4b3-0657-4221-9536-cfebc1b59376" (UID: "3566a4b3-0657-4221-9536-cfebc1b59376"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:34:58 crc kubenswrapper[4728]: I1205 11:34:58.394992 4728 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3566a4b3-0657-4221-9536-cfebc1b59376-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 11:34:58 crc kubenswrapper[4728]: I1205 11:34:58.931997 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4wpqx" event={"ID":"3566a4b3-0657-4221-9536-cfebc1b59376","Type":"ContainerDied","Data":"9b4acb170e880e23133db45a152de46b27363611b5e6eb3dbcbdd7e6cad3fb05"} Dec 05 11:34:58 crc kubenswrapper[4728]: I1205 11:34:58.932055 4728 scope.go:117] "RemoveContainer" containerID="2208171bdd56cd8ac4c91d7c590ab88c814d7c9f8b714262de119cac270a1923" Dec 05 11:34:58 crc kubenswrapper[4728]: I1205 11:34:58.932969 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4wpqx" Dec 05 11:34:58 crc kubenswrapper[4728]: I1205 11:34:58.954831 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4wpqx"] Dec 05 11:34:58 crc kubenswrapper[4728]: I1205 11:34:58.958621 4728 scope.go:117] "RemoveContainer" containerID="bb8e6c9de21d39fc39186d379975f58bece1c1c8a67fd7b3c06fdc600327175d" Dec 05 11:34:58 crc kubenswrapper[4728]: I1205 11:34:58.983445 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-4wpqx"] Dec 05 11:35:00 crc kubenswrapper[4728]: I1205 11:35:00.364475 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3566a4b3-0657-4221-9536-cfebc1b59376" path="/var/lib/kubelet/pods/3566a4b3-0657-4221-9536-cfebc1b59376/volumes" Dec 05 11:35:25 crc kubenswrapper[4728]: I1205 11:35:25.701653 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 11:35:25 crc kubenswrapper[4728]: I1205 11:35:25.702250 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 11:35:25 crc kubenswrapper[4728]: I1205 11:35:25.702303 4728 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" Dec 05 11:35:25 crc kubenswrapper[4728]: I1205 11:35:25.703082 4728 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9ec8fcb9abf0c27ff7684aed90530899b9635c169a8bf34693a24b2f503deb1e"} pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 11:35:25 crc kubenswrapper[4728]: I1205 11:35:25.703134 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" containerID="cri-o://9ec8fcb9abf0c27ff7684aed90530899b9635c169a8bf34693a24b2f503deb1e" gracePeriod=600 Dec 05 11:35:25 crc kubenswrapper[4728]: E1205 11:35:25.834810 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 11:35:26 crc kubenswrapper[4728]: I1205 11:35:26.220169 4728 generic.go:334] "Generic (PLEG): container finished" podID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerID="9ec8fcb9abf0c27ff7684aed90530899b9635c169a8bf34693a24b2f503deb1e" exitCode=0 Dec 05 11:35:26 crc kubenswrapper[4728]: I1205 11:35:26.220211 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" event={"ID":"95bfa60b-fcb6-4519-abc5-c25fea50921d","Type":"ContainerDied","Data":"9ec8fcb9abf0c27ff7684aed90530899b9635c169a8bf34693a24b2f503deb1e"} Dec 05 11:35:26 crc kubenswrapper[4728]: I1205 11:35:26.220241 4728 scope.go:117] "RemoveContainer" containerID="f07ec3293f2cf410293b607034ef0c1092d5f4131c09f2ae85161f9d724cfe26" Dec 05 11:35:26 crc kubenswrapper[4728]: I1205 11:35:26.221312 4728 scope.go:117] "RemoveContainer" containerID="9ec8fcb9abf0c27ff7684aed90530899b9635c169a8bf34693a24b2f503deb1e" Dec 05 11:35:26 crc kubenswrapper[4728]: E1205 11:35:26.221940 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 11:35:40 crc kubenswrapper[4728]: I1205 11:35:40.353084 4728 scope.go:117] "RemoveContainer" containerID="9ec8fcb9abf0c27ff7684aed90530899b9635c169a8bf34693a24b2f503deb1e" Dec 05 11:35:40 crc kubenswrapper[4728]: E1205 11:35:40.354579 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 11:35:51 crc kubenswrapper[4728]: I1205 11:35:51.352621 4728 scope.go:117] "RemoveContainer" containerID="9ec8fcb9abf0c27ff7684aed90530899b9635c169a8bf34693a24b2f503deb1e" Dec 05 11:35:51 crc kubenswrapper[4728]: E1205 11:35:51.353524 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 11:35:58 crc kubenswrapper[4728]: I1205 11:35:58.374646 4728 scope.go:117] "RemoveContainer" containerID="8d00f641b64e0dc77217bfa3f3b6e0e64613764b19e41a5bb7bf5bc57fefb456" Dec 05 11:36:05 crc kubenswrapper[4728]: I1205 11:36:05.352403 4728 scope.go:117] "RemoveContainer" containerID="9ec8fcb9abf0c27ff7684aed90530899b9635c169a8bf34693a24b2f503deb1e" Dec 05 11:36:05 crc kubenswrapper[4728]: E1205 11:36:05.353502 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 11:36:18 crc kubenswrapper[4728]: I1205 11:36:18.352435 4728 scope.go:117] "RemoveContainer" containerID="9ec8fcb9abf0c27ff7684aed90530899b9635c169a8bf34693a24b2f503deb1e" Dec 05 11:36:18 crc kubenswrapper[4728]: E1205 11:36:18.353267 4728 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 11:36:32 crc kubenswrapper[4728]: I1205 11:36:32.353779 4728 scope.go:117] "RemoveContainer" containerID="9ec8fcb9abf0c27ff7684aed90530899b9635c169a8bf34693a24b2f503deb1e" Dec 05 11:36:32 crc kubenswrapper[4728]: E1205 11:36:32.354755 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 11:36:45 crc kubenswrapper[4728]: I1205 11:36:45.352428 4728 scope.go:117] "RemoveContainer" containerID="9ec8fcb9abf0c27ff7684aed90530899b9635c169a8bf34693a24b2f503deb1e" Dec 05 11:36:45 crc kubenswrapper[4728]: E1205 11:36:45.353551 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 11:36:58 crc kubenswrapper[4728]: I1205 11:36:58.470914 4728 scope.go:117] "RemoveContainer" containerID="652f04c78f6b6b46fd9dc695423eec317fb5cf018fd85fca0d78016a62b747f5" Dec 05 11:36:58 crc kubenswrapper[4728]: I1205 11:36:58.500013 4728 scope.go:117] "RemoveContainer" containerID="6f6a28abdca56c20264691e4ade4b122fdbe08a6d995e8a70b343e5f887a512c" Dec 05 11:36:58 crc kubenswrapper[4728]: I1205 11:36:58.527286 4728 scope.go:117] "RemoveContainer" containerID="ab74e70e8fd0007fdf95b3b591eb20574d53752908e920d12a501578810735db" Dec 05 11:36:58 crc kubenswrapper[4728]: I1205 11:36:58.558005 4728 scope.go:117] "RemoveContainer" containerID="6dd9d3a34dedd9407e02a156620c7d206a6463ca97d95c323314e1a916f07809" Dec 05 11:36:58 crc kubenswrapper[4728]: I1205 11:36:58.591218 4728 scope.go:117] "RemoveContainer" containerID="a17cdf0d6d59bb47b7c1c9d368a73bee178275f3c086c669fa938b563c01a115" Dec 05 11:36:58 crc kubenswrapper[4728]: I1205 11:36:58.617219 4728 scope.go:117] "RemoveContainer" containerID="892799e7cfa71a099c9020b214b671957a638cacad6b70b2f97af52abf6e69b2" Dec 05 11:36:59 crc kubenswrapper[4728]: I1205 11:36:59.353069 4728 scope.go:117] "RemoveContainer" containerID="9ec8fcb9abf0c27ff7684aed90530899b9635c169a8bf34693a24b2f503deb1e" Dec 05 11:36:59 crc kubenswrapper[4728]: E1205 11:36:59.353508 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 11:37:13 crc kubenswrapper[4728]: I1205 11:37:13.353051 4728 
scope.go:117] "RemoveContainer" containerID="9ec8fcb9abf0c27ff7684aed90530899b9635c169a8bf34693a24b2f503deb1e" Dec 05 11:37:13 crc kubenswrapper[4728]: E1205 11:37:13.353912 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 11:37:27 crc kubenswrapper[4728]: I1205 11:37:27.351690 4728 scope.go:117] "RemoveContainer" containerID="9ec8fcb9abf0c27ff7684aed90530899b9635c169a8bf34693a24b2f503deb1e" Dec 05 11:37:27 crc kubenswrapper[4728]: E1205 11:37:27.352789 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 11:37:42 crc kubenswrapper[4728]: I1205 11:37:42.352191 4728 scope.go:117] "RemoveContainer" containerID="9ec8fcb9abf0c27ff7684aed90530899b9635c169a8bf34693a24b2f503deb1e" Dec 05 11:37:42 crc kubenswrapper[4728]: E1205 11:37:42.353680 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 11:37:57 crc kubenswrapper[4728]: I1205 11:37:57.352289 4728 scope.go:117] "RemoveContainer" containerID="9ec8fcb9abf0c27ff7684aed90530899b9635c169a8bf34693a24b2f503deb1e" Dec 05 11:37:57 crc kubenswrapper[4728]: E1205 11:37:57.353726 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 11:37:57 crc kubenswrapper[4728]: I1205 11:37:57.920592 4728 generic.go:334] "Generic (PLEG): container finished" podID="cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9" containerID="37645fa0b7b2e610d10fac00732af45b92c551beb8627d3df53d53b2e81e4d2b" exitCode=0 Dec 05 11:37:57 crc kubenswrapper[4728]: I1205 11:37:57.920638 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-rjm82" event={"ID":"cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9","Type":"ContainerDied","Data":"37645fa0b7b2e610d10fac00732af45b92c551beb8627d3df53d53b2e81e4d2b"} Dec 05 11:37:58 crc kubenswrapper[4728]: I1205 11:37:58.705060 4728 scope.go:117] "RemoveContainer" containerID="71f9bc9cb7917f02b5226e6794bf3ebca9f83c660b2e9133b2c650dbdf71af50" Dec 05 11:37:58 crc kubenswrapper[4728]: I1205 11:37:58.730626 4728 scope.go:117] "RemoveContainer" 
containerID="94934ec710944a1aaa2e39c0315e60c173dee33a92d4a98d3f432e7a94451cd0" Dec 05 11:37:58 crc kubenswrapper[4728]: I1205 11:37:58.754388 4728 scope.go:117] "RemoveContainer" containerID="63c4847beb750fbf73d14ddae6bb3d52e2bd760bfa502bf4e25eefa32a128928" Dec 05 11:37:58 crc kubenswrapper[4728]: I1205 11:37:58.779369 4728 scope.go:117] "RemoveContainer" containerID="3a19ced693d59db18eae63a9d5a3e6e7675cd7102c04a8bc3e3d85719ea7a42e" Dec 05 11:37:59 crc kubenswrapper[4728]: I1205 11:37:59.358937 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-rjm82" Dec 05 11:37:59 crc kubenswrapper[4728]: I1205 11:37:59.476179 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9-inventory\") pod \"cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9\" (UID: \"cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9\") " Dec 05 11:37:59 crc kubenswrapper[4728]: I1205 11:37:59.476310 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9-bootstrap-combined-ca-bundle\") pod \"cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9\" (UID: \"cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9\") " Dec 05 11:37:59 crc kubenswrapper[4728]: I1205 11:37:59.476357 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9-ssh-key\") pod \"cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9\" (UID: \"cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9\") " Dec 05 11:37:59 crc kubenswrapper[4728]: I1205 11:37:59.476386 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lwppl\" (UniqueName: \"kubernetes.io/projected/cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9-kube-api-access-lwppl\") pod \"cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9\" (UID: \"cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9\") " Dec 05 11:37:59 crc kubenswrapper[4728]: I1205 11:37:59.485050 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9" (UID: "cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:37:59 crc kubenswrapper[4728]: I1205 11:37:59.485073 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9-kube-api-access-lwppl" (OuterVolumeSpecName: "kube-api-access-lwppl") pod "cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9" (UID: "cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9"). InnerVolumeSpecName "kube-api-access-lwppl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:37:59 crc kubenswrapper[4728]: I1205 11:37:59.518987 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9-inventory" (OuterVolumeSpecName: "inventory") pod "cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9" (UID: "cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:37:59 crc kubenswrapper[4728]: I1205 11:37:59.526244 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9" (UID: "cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:37:59 crc kubenswrapper[4728]: I1205 11:37:59.579085 4728 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:37:59 crc kubenswrapper[4728]: I1205 11:37:59.579112 4728 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 11:37:59 crc kubenswrapper[4728]: I1205 11:37:59.579126 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lwppl\" (UniqueName: \"kubernetes.io/projected/cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9-kube-api-access-lwppl\") on node \"crc\" DevicePath \"\"" Dec 05 11:37:59 crc kubenswrapper[4728]: I1205 11:37:59.579138 4728 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 11:37:59 crc kubenswrapper[4728]: I1205 11:37:59.946217 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-rjm82" event={"ID":"cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9","Type":"ContainerDied","Data":"d7327e5f3bf9841bac6280953f5dd70e3552f43749913ca56a65f8f3f83e78fa"} Dec 05 11:37:59 crc kubenswrapper[4728]: I1205 11:37:59.946646 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d7327e5f3bf9841bac6280953f5dd70e3552f43749913ca56a65f8f3f83e78fa" Dec 05 11:37:59 crc kubenswrapper[4728]: I1205 11:37:59.946750 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-rjm82" Dec 05 11:38:00 crc kubenswrapper[4728]: I1205 11:38:00.047831 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rn9wc"] Dec 05 11:38:00 crc kubenswrapper[4728]: E1205 11:38:00.048354 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 05 11:38:00 crc kubenswrapper[4728]: I1205 11:38:00.048381 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 05 11:38:00 crc kubenswrapper[4728]: E1205 11:38:00.048403 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3566a4b3-0657-4221-9536-cfebc1b59376" containerName="extract-utilities" Dec 05 11:38:00 crc kubenswrapper[4728]: I1205 11:38:00.048412 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="3566a4b3-0657-4221-9536-cfebc1b59376" containerName="extract-utilities" Dec 05 11:38:00 crc kubenswrapper[4728]: E1205 11:38:00.048444 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1885fbe0-6acc-484b-948e-0511fbee36e2" containerName="extract-utilities" Dec 05 11:38:00 crc kubenswrapper[4728]: I1205 11:38:00.048453 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="1885fbe0-6acc-484b-948e-0511fbee36e2" containerName="extract-utilities" Dec 05 11:38:00 crc kubenswrapper[4728]: E1205 11:38:00.048471 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3566a4b3-0657-4221-9536-cfebc1b59376" containerName="registry-server" Dec 05 11:38:00 crc kubenswrapper[4728]: I1205 11:38:00.048478 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="3566a4b3-0657-4221-9536-cfebc1b59376" containerName="registry-server" Dec 05 11:38:00 crc kubenswrapper[4728]: E1205 11:38:00.048496 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1885fbe0-6acc-484b-948e-0511fbee36e2" containerName="registry-server" Dec 05 11:38:00 crc kubenswrapper[4728]: I1205 11:38:00.048504 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="1885fbe0-6acc-484b-948e-0511fbee36e2" containerName="registry-server" Dec 05 11:38:00 crc kubenswrapper[4728]: E1205 11:38:00.048516 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3566a4b3-0657-4221-9536-cfebc1b59376" containerName="extract-content" Dec 05 11:38:00 crc kubenswrapper[4728]: I1205 11:38:00.048523 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="3566a4b3-0657-4221-9536-cfebc1b59376" containerName="extract-content" Dec 05 11:38:00 crc kubenswrapper[4728]: E1205 11:38:00.048548 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1885fbe0-6acc-484b-948e-0511fbee36e2" containerName="extract-content" Dec 05 11:38:00 crc kubenswrapper[4728]: I1205 11:38:00.048556 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="1885fbe0-6acc-484b-948e-0511fbee36e2" containerName="extract-content" Dec 05 11:38:00 crc kubenswrapper[4728]: I1205 11:38:00.048828 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 05 11:38:00 crc kubenswrapper[4728]: I1205 11:38:00.048847 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="1885fbe0-6acc-484b-948e-0511fbee36e2" 
containerName="registry-server" Dec 05 11:38:00 crc kubenswrapper[4728]: I1205 11:38:00.048870 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="3566a4b3-0657-4221-9536-cfebc1b59376" containerName="registry-server" Dec 05 11:38:00 crc kubenswrapper[4728]: I1205 11:38:00.049697 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rn9wc" Dec 05 11:38:00 crc kubenswrapper[4728]: I1205 11:38:00.052712 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 11:38:00 crc kubenswrapper[4728]: I1205 11:38:00.052904 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 11:38:00 crc kubenswrapper[4728]: I1205 11:38:00.053618 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b4kzh" Dec 05 11:38:00 crc kubenswrapper[4728]: I1205 11:38:00.057441 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 11:38:00 crc kubenswrapper[4728]: I1205 11:38:00.072290 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rn9wc"] Dec 05 11:38:00 crc kubenswrapper[4728]: I1205 11:38:00.191170 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2b484238-d80c-4274-b0b6-ea03a050e575-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-rn9wc\" (UID: \"2b484238-d80c-4274-b0b6-ea03a050e575\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rn9wc" Dec 05 11:38:00 crc kubenswrapper[4728]: I1205 11:38:00.191464 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ll7lz\" (UniqueName: \"kubernetes.io/projected/2b484238-d80c-4274-b0b6-ea03a050e575-kube-api-access-ll7lz\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-rn9wc\" (UID: \"2b484238-d80c-4274-b0b6-ea03a050e575\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rn9wc" Dec 05 11:38:00 crc kubenswrapper[4728]: I1205 11:38:00.191653 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2b484238-d80c-4274-b0b6-ea03a050e575-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-rn9wc\" (UID: \"2b484238-d80c-4274-b0b6-ea03a050e575\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rn9wc" Dec 05 11:38:00 crc kubenswrapper[4728]: I1205 11:38:00.293485 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2b484238-d80c-4274-b0b6-ea03a050e575-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-rn9wc\" (UID: \"2b484238-d80c-4274-b0b6-ea03a050e575\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rn9wc" Dec 05 11:38:00 crc kubenswrapper[4728]: I1205 11:38:00.293523 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ll7lz\" (UniqueName: \"kubernetes.io/projected/2b484238-d80c-4274-b0b6-ea03a050e575-kube-api-access-ll7lz\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-rn9wc\" (UID: \"2b484238-d80c-4274-b0b6-ea03a050e575\") " 
pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rn9wc" Dec 05 11:38:00 crc kubenswrapper[4728]: I1205 11:38:00.293587 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2b484238-d80c-4274-b0b6-ea03a050e575-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-rn9wc\" (UID: \"2b484238-d80c-4274-b0b6-ea03a050e575\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rn9wc" Dec 05 11:38:00 crc kubenswrapper[4728]: I1205 11:38:00.307890 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2b484238-d80c-4274-b0b6-ea03a050e575-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-rn9wc\" (UID: \"2b484238-d80c-4274-b0b6-ea03a050e575\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rn9wc" Dec 05 11:38:00 crc kubenswrapper[4728]: I1205 11:38:00.308067 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2b484238-d80c-4274-b0b6-ea03a050e575-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-rn9wc\" (UID: \"2b484238-d80c-4274-b0b6-ea03a050e575\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rn9wc" Dec 05 11:38:00 crc kubenswrapper[4728]: I1205 11:38:00.326741 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ll7lz\" (UniqueName: \"kubernetes.io/projected/2b484238-d80c-4274-b0b6-ea03a050e575-kube-api-access-ll7lz\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-rn9wc\" (UID: \"2b484238-d80c-4274-b0b6-ea03a050e575\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rn9wc" Dec 05 11:38:00 crc kubenswrapper[4728]: I1205 11:38:00.365738 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rn9wc" Dec 05 11:38:01 crc kubenswrapper[4728]: I1205 11:38:01.022409 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rn9wc"] Dec 05 11:38:01 crc kubenswrapper[4728]: W1205 11:38:01.022613 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2b484238_d80c_4274_b0b6_ea03a050e575.slice/crio-7a401a5e8f18a6f9e30223d1f2cc2672088b7fbe67319f43d934fa2d8c83b5c2 WatchSource:0}: Error finding container 7a401a5e8f18a6f9e30223d1f2cc2672088b7fbe67319f43d934fa2d8c83b5c2: Status 404 returned error can't find the container with id 7a401a5e8f18a6f9e30223d1f2cc2672088b7fbe67319f43d934fa2d8c83b5c2 Dec 05 11:38:01 crc kubenswrapper[4728]: I1205 11:38:01.025518 4728 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 11:38:01 crc kubenswrapper[4728]: I1205 11:38:01.974752 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rn9wc" event={"ID":"2b484238-d80c-4274-b0b6-ea03a050e575","Type":"ContainerStarted","Data":"e100f1cd95b875efb9bd9f5ca69e2b84beff305ed9ae24f5d3bd2d4fe6064da3"} Dec 05 11:38:01 crc kubenswrapper[4728]: I1205 11:38:01.975066 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rn9wc" event={"ID":"2b484238-d80c-4274-b0b6-ea03a050e575","Type":"ContainerStarted","Data":"7a401a5e8f18a6f9e30223d1f2cc2672088b7fbe67319f43d934fa2d8c83b5c2"} Dec 05 11:38:02 crc kubenswrapper[4728]: I1205 11:38:02.008758 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rn9wc" podStartSLOduration=1.487472145 podStartE2EDuration="2.008736918s" podCreationTimestamp="2025-12-05 11:38:00 +0000 UTC" firstStartedPulling="2025-12-05 11:38:01.025310865 +0000 UTC m=+1815.167433558" lastFinishedPulling="2025-12-05 11:38:01.546575618 +0000 UTC m=+1815.688698331" observedRunningTime="2025-12-05 11:38:01.993561046 +0000 UTC m=+1816.135683759" watchObservedRunningTime="2025-12-05 11:38:02.008736918 +0000 UTC m=+1816.150859621" Dec 05 11:38:12 crc kubenswrapper[4728]: I1205 11:38:12.352016 4728 scope.go:117] "RemoveContainer" containerID="9ec8fcb9abf0c27ff7684aed90530899b9635c169a8bf34693a24b2f503deb1e" Dec 05 11:38:12 crc kubenswrapper[4728]: E1205 11:38:12.353005 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 11:38:18 crc kubenswrapper[4728]: I1205 11:38:18.057447 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-z24h9"] Dec 05 11:38:18 crc kubenswrapper[4728]: I1205 11:38:18.071196 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-a7df-account-create-update-jvkxk"] Dec 05 11:38:18 crc kubenswrapper[4728]: I1205 11:38:18.082002 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-zb2z4"] Dec 05 11:38:18 crc kubenswrapper[4728]: I1205 
11:38:18.092159 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-1b8d-account-create-update-fsdrh"] Dec 05 11:38:18 crc kubenswrapper[4728]: I1205 11:38:18.100229 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-a7df-account-create-update-jvkxk"] Dec 05 11:38:18 crc kubenswrapper[4728]: I1205 11:38:18.108020 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-z24h9"] Dec 05 11:38:18 crc kubenswrapper[4728]: I1205 11:38:18.115326 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-zb2z4"] Dec 05 11:38:18 crc kubenswrapper[4728]: I1205 11:38:18.122346 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-1b8d-account-create-update-fsdrh"] Dec 05 11:38:18 crc kubenswrapper[4728]: I1205 11:38:18.366217 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="12d05504-fe2a-4fc5-a9c9-33bc47aca64f" path="/var/lib/kubelet/pods/12d05504-fe2a-4fc5-a9c9-33bc47aca64f/volumes" Dec 05 11:38:18 crc kubenswrapper[4728]: I1205 11:38:18.366885 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="816b032e-4ef3-46dc-a6bb-17610cf07073" path="/var/lib/kubelet/pods/816b032e-4ef3-46dc-a6bb-17610cf07073/volumes" Dec 05 11:38:18 crc kubenswrapper[4728]: I1205 11:38:18.367480 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9f832fc9-dbbb-4512-9ae8-431e113ce808" path="/var/lib/kubelet/pods/9f832fc9-dbbb-4512-9ae8-431e113ce808/volumes" Dec 05 11:38:18 crc kubenswrapper[4728]: I1205 11:38:18.368082 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dec7fcaf-6110-4102-8c09-3d79fe7763b4" path="/var/lib/kubelet/pods/dec7fcaf-6110-4102-8c09-3d79fe7763b4/volumes" Dec 05 11:38:21 crc kubenswrapper[4728]: I1205 11:38:21.043635 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-pcvqf"] Dec 05 11:38:21 crc kubenswrapper[4728]: I1205 11:38:21.060831 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-784a-account-create-update-c7xqd"] Dec 05 11:38:21 crc kubenswrapper[4728]: I1205 11:38:21.074265 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-pcvqf"] Dec 05 11:38:21 crc kubenswrapper[4728]: I1205 11:38:21.087965 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-784a-account-create-update-c7xqd"] Dec 05 11:38:22 crc kubenswrapper[4728]: I1205 11:38:22.366884 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="66929ba8-a892-4070-8c21-f47af196251a" path="/var/lib/kubelet/pods/66929ba8-a892-4070-8c21-f47af196251a/volumes" Dec 05 11:38:22 crc kubenswrapper[4728]: I1205 11:38:22.367717 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d36a8b9f-7bab-4255-907f-4540e7a6b4ec" path="/var/lib/kubelet/pods/d36a8b9f-7bab-4255-907f-4540e7a6b4ec/volumes" Dec 05 11:38:25 crc kubenswrapper[4728]: I1205 11:38:25.352376 4728 scope.go:117] "RemoveContainer" containerID="9ec8fcb9abf0c27ff7684aed90530899b9635c169a8bf34693a24b2f503deb1e" Dec 05 11:38:25 crc kubenswrapper[4728]: E1205 11:38:25.354871 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 11:38:37 crc kubenswrapper[4728]: I1205 11:38:37.351681 4728 scope.go:117] "RemoveContainer" containerID="9ec8fcb9abf0c27ff7684aed90530899b9635c169a8bf34693a24b2f503deb1e" Dec 05 11:38:37 crc kubenswrapper[4728]: E1205 11:38:37.352457 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 11:38:49 crc kubenswrapper[4728]: I1205 11:38:49.352201 4728 scope.go:117] "RemoveContainer" containerID="9ec8fcb9abf0c27ff7684aed90530899b9635c169a8bf34693a24b2f503deb1e" Dec 05 11:38:49 crc kubenswrapper[4728]: E1205 11:38:49.353031 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 11:38:58 crc kubenswrapper[4728]: I1205 11:38:58.878463 4728 scope.go:117] "RemoveContainer" containerID="8f3976536cb3b048be931eae838acdb03578e01467663f7ce8db5e795904855d" Dec 05 11:38:58 crc kubenswrapper[4728]: I1205 11:38:58.914877 4728 scope.go:117] "RemoveContainer" containerID="f7b59debd9d7c29556742f504dcece99cda7b776a7adae156794d6e5184ff9f1" Dec 05 11:38:58 crc kubenswrapper[4728]: I1205 11:38:58.993104 4728 scope.go:117] "RemoveContainer" containerID="61f95e19797206de67bcb54946603504d30662b6db348dd7606a2d6bf47173e8" Dec 05 11:38:59 crc kubenswrapper[4728]: I1205 11:38:59.059067 4728 scope.go:117] "RemoveContainer" containerID="0d67782f0246919a000e3f2e6a2567bef7a0632e21bf5e21dd1530eff47a7209" Dec 05 11:38:59 crc kubenswrapper[4728]: I1205 11:38:59.097485 4728 scope.go:117] "RemoveContainer" containerID="0f7a2586e480b180959bac03629689913722dfbce1480cb4e037f8a7c5044505" Dec 05 11:38:59 crc kubenswrapper[4728]: I1205 11:38:59.137635 4728 scope.go:117] "RemoveContainer" containerID="f5058486fa0b5940a687e46a12e68e178b6fd9bb8678782163812d56a1709489" Dec 05 11:38:59 crc kubenswrapper[4728]: I1205 11:38:59.169625 4728 scope.go:117] "RemoveContainer" containerID="4f840465a966123a0aea5f3710dbb71cf794b11a30d9032b546ae8d19426040c" Dec 05 11:39:04 crc kubenswrapper[4728]: I1205 11:39:04.084718 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-09b6-account-create-update-j5949"] Dec 05 11:39:04 crc kubenswrapper[4728]: I1205 11:39:04.103151 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-09b6-account-create-update-j5949"] Dec 05 11:39:04 crc kubenswrapper[4728]: I1205 11:39:04.116941 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-ca6c-account-create-update-j9pql"] Dec 05 11:39:04 crc kubenswrapper[4728]: I1205 11:39:04.128568 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-g59p8"] Dec 05 11:39:04 crc kubenswrapper[4728]: I1205 11:39:04.137206 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/manila-214d-account-create-update-lpk9z"] Dec 05 11:39:04 crc kubenswrapper[4728]: I1205 11:39:04.146187 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-qgcwk"] Dec 05 11:39:04 crc kubenswrapper[4728]: I1205 11:39:04.154874 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-ca6c-account-create-update-j9pql"] Dec 05 11:39:04 crc kubenswrapper[4728]: I1205 11:39:04.162589 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-g59p8"] Dec 05 11:39:04 crc kubenswrapper[4728]: I1205 11:39:04.170169 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-8304-account-create-update-5bb5c"] Dec 05 11:39:04 crc kubenswrapper[4728]: I1205 11:39:04.177773 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-db-create-cblx8"] Dec 05 11:39:04 crc kubenswrapper[4728]: I1205 11:39:04.185882 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-lzfv7"] Dec 05 11:39:04 crc kubenswrapper[4728]: I1205 11:39:04.193715 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-214d-account-create-update-lpk9z"] Dec 05 11:39:04 crc kubenswrapper[4728]: I1205 11:39:04.219482 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-qgcwk"] Dec 05 11:39:04 crc kubenswrapper[4728]: I1205 11:39:04.234516 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-lzfv7"] Dec 05 11:39:04 crc kubenswrapper[4728]: I1205 11:39:04.244644 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-db-create-cblx8"] Dec 05 11:39:04 crc kubenswrapper[4728]: I1205 11:39:04.253500 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-8304-account-create-update-5bb5c"] Dec 05 11:39:04 crc kubenswrapper[4728]: I1205 11:39:04.353207 4728 scope.go:117] "RemoveContainer" containerID="9ec8fcb9abf0c27ff7684aed90530899b9635c169a8bf34693a24b2f503deb1e" Dec 05 11:39:04 crc kubenswrapper[4728]: E1205 11:39:04.353643 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 11:39:04 crc kubenswrapper[4728]: I1205 11:39:04.364287 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09a3c745-2007-4f55-b706-24f148fc1805" path="/var/lib/kubelet/pods/09a3c745-2007-4f55-b706-24f148fc1805/volumes" Dec 05 11:39:04 crc kubenswrapper[4728]: I1205 11:39:04.365077 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1077e45f-8397-4377-a4e4-6dea1e8f16cb" path="/var/lib/kubelet/pods/1077e45f-8397-4377-a4e4-6dea1e8f16cb/volumes" Dec 05 11:39:04 crc kubenswrapper[4728]: I1205 11:39:04.365659 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1f53576d-4b2d-4269-bea7-c3deb0d3292d" path="/var/lib/kubelet/pods/1f53576d-4b2d-4269-bea7-c3deb0d3292d/volumes" Dec 05 11:39:04 crc kubenswrapper[4728]: I1205 11:39:04.366394 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="23d1273f-c19e-48d7-b792-0a6db00bc94d" path="/var/lib/kubelet/pods/23d1273f-c19e-48d7-b792-0a6db00bc94d/volumes" Dec 05 
11:39:04 crc kubenswrapper[4728]: I1205 11:39:04.367500 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="604ad92d-004e-41d5-9467-f8df44cfd9b2" path="/var/lib/kubelet/pods/604ad92d-004e-41d5-9467-f8df44cfd9b2/volumes" Dec 05 11:39:04 crc kubenswrapper[4728]: I1205 11:39:04.368143 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="72c64530-79c6-4b13-a18f-70c0c1405d8f" path="/var/lib/kubelet/pods/72c64530-79c6-4b13-a18f-70c0c1405d8f/volumes" Dec 05 11:39:04 crc kubenswrapper[4728]: I1205 11:39:04.368712 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b96bd60f-bf85-4263-9e18-c367e742c780" path="/var/lib/kubelet/pods/b96bd60f-bf85-4263-9e18-c367e742c780/volumes" Dec 05 11:39:04 crc kubenswrapper[4728]: I1205 11:39:04.369753 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c98ef602-49f4-4256-8a2d-b6a27ccaf903" path="/var/lib/kubelet/pods/c98ef602-49f4-4256-8a2d-b6a27ccaf903/volumes" Dec 05 11:39:15 crc kubenswrapper[4728]: I1205 11:39:15.058626 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-dmlfm"] Dec 05 11:39:15 crc kubenswrapper[4728]: I1205 11:39:15.071063 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-dmlfm"] Dec 05 11:39:16 crc kubenswrapper[4728]: I1205 11:39:16.368053 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="540467c2-2e5b-489d-ab98-42b9d580136b" path="/var/lib/kubelet/pods/540467c2-2e5b-489d-ab98-42b9d580136b/volumes" Dec 05 11:39:17 crc kubenswrapper[4728]: I1205 11:39:17.033366 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-8fpw9"] Dec 05 11:39:17 crc kubenswrapper[4728]: I1205 11:39:17.041660 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-8fpw9"] Dec 05 11:39:18 crc kubenswrapper[4728]: I1205 11:39:18.352285 4728 scope.go:117] "RemoveContainer" containerID="9ec8fcb9abf0c27ff7684aed90530899b9635c169a8bf34693a24b2f503deb1e" Dec 05 11:39:18 crc kubenswrapper[4728]: E1205 11:39:18.352610 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 11:39:18 crc kubenswrapper[4728]: I1205 11:39:18.365012 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="30ec6fee-e0fe-471a-a673-7856319a8dd8" path="/var/lib/kubelet/pods/30ec6fee-e0fe-471a-a673-7856319a8dd8/volumes" Dec 05 11:39:29 crc kubenswrapper[4728]: I1205 11:39:29.352297 4728 scope.go:117] "RemoveContainer" containerID="9ec8fcb9abf0c27ff7684aed90530899b9635c169a8bf34693a24b2f503deb1e" Dec 05 11:39:29 crc kubenswrapper[4728]: E1205 11:39:29.353047 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 11:39:37 crc kubenswrapper[4728]: I1205 11:39:37.924252 4728 generic.go:334] 
"Generic (PLEG): container finished" podID="2b484238-d80c-4274-b0b6-ea03a050e575" containerID="e100f1cd95b875efb9bd9f5ca69e2b84beff305ed9ae24f5d3bd2d4fe6064da3" exitCode=0 Dec 05 11:39:37 crc kubenswrapper[4728]: I1205 11:39:37.924367 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rn9wc" event={"ID":"2b484238-d80c-4274-b0b6-ea03a050e575","Type":"ContainerDied","Data":"e100f1cd95b875efb9bd9f5ca69e2b84beff305ed9ae24f5d3bd2d4fe6064da3"} Dec 05 11:39:39 crc kubenswrapper[4728]: I1205 11:39:39.361710 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rn9wc" Dec 05 11:39:39 crc kubenswrapper[4728]: I1205 11:39:39.481458 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ll7lz\" (UniqueName: \"kubernetes.io/projected/2b484238-d80c-4274-b0b6-ea03a050e575-kube-api-access-ll7lz\") pod \"2b484238-d80c-4274-b0b6-ea03a050e575\" (UID: \"2b484238-d80c-4274-b0b6-ea03a050e575\") " Dec 05 11:39:39 crc kubenswrapper[4728]: I1205 11:39:39.481574 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2b484238-d80c-4274-b0b6-ea03a050e575-ssh-key\") pod \"2b484238-d80c-4274-b0b6-ea03a050e575\" (UID: \"2b484238-d80c-4274-b0b6-ea03a050e575\") " Dec 05 11:39:39 crc kubenswrapper[4728]: I1205 11:39:39.481628 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2b484238-d80c-4274-b0b6-ea03a050e575-inventory\") pod \"2b484238-d80c-4274-b0b6-ea03a050e575\" (UID: \"2b484238-d80c-4274-b0b6-ea03a050e575\") " Dec 05 11:39:39 crc kubenswrapper[4728]: I1205 11:39:39.487727 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2b484238-d80c-4274-b0b6-ea03a050e575-kube-api-access-ll7lz" (OuterVolumeSpecName: "kube-api-access-ll7lz") pod "2b484238-d80c-4274-b0b6-ea03a050e575" (UID: "2b484238-d80c-4274-b0b6-ea03a050e575"). InnerVolumeSpecName "kube-api-access-ll7lz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:39:39 crc kubenswrapper[4728]: I1205 11:39:39.512520 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b484238-d80c-4274-b0b6-ea03a050e575-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "2b484238-d80c-4274-b0b6-ea03a050e575" (UID: "2b484238-d80c-4274-b0b6-ea03a050e575"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:39:39 crc kubenswrapper[4728]: I1205 11:39:39.512704 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b484238-d80c-4274-b0b6-ea03a050e575-inventory" (OuterVolumeSpecName: "inventory") pod "2b484238-d80c-4274-b0b6-ea03a050e575" (UID: "2b484238-d80c-4274-b0b6-ea03a050e575"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:39:39 crc kubenswrapper[4728]: I1205 11:39:39.584502 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ll7lz\" (UniqueName: \"kubernetes.io/projected/2b484238-d80c-4274-b0b6-ea03a050e575-kube-api-access-ll7lz\") on node \"crc\" DevicePath \"\"" Dec 05 11:39:39 crc kubenswrapper[4728]: I1205 11:39:39.584532 4728 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2b484238-d80c-4274-b0b6-ea03a050e575-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 11:39:39 crc kubenswrapper[4728]: I1205 11:39:39.584543 4728 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2b484238-d80c-4274-b0b6-ea03a050e575-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 11:39:39 crc kubenswrapper[4728]: I1205 11:39:39.944727 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rn9wc" event={"ID":"2b484238-d80c-4274-b0b6-ea03a050e575","Type":"ContainerDied","Data":"7a401a5e8f18a6f9e30223d1f2cc2672088b7fbe67319f43d934fa2d8c83b5c2"} Dec 05 11:39:39 crc kubenswrapper[4728]: I1205 11:39:39.945004 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7a401a5e8f18a6f9e30223d1f2cc2672088b7fbe67319f43d934fa2d8c83b5c2" Dec 05 11:39:39 crc kubenswrapper[4728]: I1205 11:39:39.944776 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-rn9wc" Dec 05 11:39:40 crc kubenswrapper[4728]: I1205 11:39:40.023122 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-24vs6"] Dec 05 11:39:40 crc kubenswrapper[4728]: E1205 11:39:40.023542 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b484238-d80c-4274-b0b6-ea03a050e575" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 05 11:39:40 crc kubenswrapper[4728]: I1205 11:39:40.023561 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b484238-d80c-4274-b0b6-ea03a050e575" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 05 11:39:40 crc kubenswrapper[4728]: I1205 11:39:40.023752 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="2b484238-d80c-4274-b0b6-ea03a050e575" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 05 11:39:40 crc kubenswrapper[4728]: I1205 11:39:40.024388 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-24vs6" Dec 05 11:39:40 crc kubenswrapper[4728]: I1205 11:39:40.025900 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 11:39:40 crc kubenswrapper[4728]: I1205 11:39:40.026087 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 11:39:40 crc kubenswrapper[4728]: I1205 11:39:40.026116 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b4kzh" Dec 05 11:39:40 crc kubenswrapper[4728]: I1205 11:39:40.027401 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 11:39:40 crc kubenswrapper[4728]: I1205 11:39:40.050702 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-24vs6"] Dec 05 11:39:40 crc kubenswrapper[4728]: I1205 11:39:40.198136 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/96cca126-d9b0-4c1c-93d8-63872e4a5e1c-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-24vs6\" (UID: \"96cca126-d9b0-4c1c-93d8-63872e4a5e1c\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-24vs6" Dec 05 11:39:40 crc kubenswrapper[4728]: I1205 11:39:40.198827 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sdgv4\" (UniqueName: \"kubernetes.io/projected/96cca126-d9b0-4c1c-93d8-63872e4a5e1c-kube-api-access-sdgv4\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-24vs6\" (UID: \"96cca126-d9b0-4c1c-93d8-63872e4a5e1c\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-24vs6" Dec 05 11:39:40 crc kubenswrapper[4728]: I1205 11:39:40.199125 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/96cca126-d9b0-4c1c-93d8-63872e4a5e1c-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-24vs6\" (UID: \"96cca126-d9b0-4c1c-93d8-63872e4a5e1c\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-24vs6" Dec 05 11:39:40 crc kubenswrapper[4728]: I1205 11:39:40.302110 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sdgv4\" (UniqueName: \"kubernetes.io/projected/96cca126-d9b0-4c1c-93d8-63872e4a5e1c-kube-api-access-sdgv4\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-24vs6\" (UID: \"96cca126-d9b0-4c1c-93d8-63872e4a5e1c\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-24vs6" Dec 05 11:39:40 crc kubenswrapper[4728]: I1205 11:39:40.302298 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/96cca126-d9b0-4c1c-93d8-63872e4a5e1c-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-24vs6\" (UID: \"96cca126-d9b0-4c1c-93d8-63872e4a5e1c\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-24vs6" Dec 05 11:39:40 crc kubenswrapper[4728]: I1205 11:39:40.302349 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/96cca126-d9b0-4c1c-93d8-63872e4a5e1c-inventory\") 
pod \"configure-network-edpm-deployment-openstack-edpm-ipam-24vs6\" (UID: \"96cca126-d9b0-4c1c-93d8-63872e4a5e1c\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-24vs6" Dec 05 11:39:40 crc kubenswrapper[4728]: I1205 11:39:40.307464 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/96cca126-d9b0-4c1c-93d8-63872e4a5e1c-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-24vs6\" (UID: \"96cca126-d9b0-4c1c-93d8-63872e4a5e1c\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-24vs6" Dec 05 11:39:40 crc kubenswrapper[4728]: I1205 11:39:40.307568 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/96cca126-d9b0-4c1c-93d8-63872e4a5e1c-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-24vs6\" (UID: \"96cca126-d9b0-4c1c-93d8-63872e4a5e1c\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-24vs6" Dec 05 11:39:40 crc kubenswrapper[4728]: I1205 11:39:40.322042 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sdgv4\" (UniqueName: \"kubernetes.io/projected/96cca126-d9b0-4c1c-93d8-63872e4a5e1c-kube-api-access-sdgv4\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-24vs6\" (UID: \"96cca126-d9b0-4c1c-93d8-63872e4a5e1c\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-24vs6" Dec 05 11:39:40 crc kubenswrapper[4728]: I1205 11:39:40.341346 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-24vs6" Dec 05 11:39:40 crc kubenswrapper[4728]: I1205 11:39:40.850724 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-24vs6"] Dec 05 11:39:40 crc kubenswrapper[4728]: W1205 11:39:40.856955 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod96cca126_d9b0_4c1c_93d8_63872e4a5e1c.slice/crio-c467c3a599692836eaab1269162244f7d81ee47006a2e7aec4dee3012e29e7a7 WatchSource:0}: Error finding container c467c3a599692836eaab1269162244f7d81ee47006a2e7aec4dee3012e29e7a7: Status 404 returned error can't find the container with id c467c3a599692836eaab1269162244f7d81ee47006a2e7aec4dee3012e29e7a7 Dec 05 11:39:40 crc kubenswrapper[4728]: I1205 11:39:40.955504 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-24vs6" event={"ID":"96cca126-d9b0-4c1c-93d8-63872e4a5e1c","Type":"ContainerStarted","Data":"c467c3a599692836eaab1269162244f7d81ee47006a2e7aec4dee3012e29e7a7"} Dec 05 11:39:41 crc kubenswrapper[4728]: I1205 11:39:41.352865 4728 scope.go:117] "RemoveContainer" containerID="9ec8fcb9abf0c27ff7684aed90530899b9635c169a8bf34693a24b2f503deb1e" Dec 05 11:39:41 crc kubenswrapper[4728]: E1205 11:39:41.353232 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 11:39:43 crc kubenswrapper[4728]: I1205 11:39:43.991952 4728 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-24vs6" event={"ID":"96cca126-d9b0-4c1c-93d8-63872e4a5e1c","Type":"ContainerStarted","Data":"fbc497485928c5e2dcd3e006193bd34ff42f263dc5aca3e59eddb4d49f827755"} Dec 05 11:39:44 crc kubenswrapper[4728]: I1205 11:39:44.033694 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-24vs6" podStartSLOduration=1.890877825 podStartE2EDuration="4.033661675s" podCreationTimestamp="2025-12-05 11:39:40 +0000 UTC" firstStartedPulling="2025-12-05 11:39:40.861369871 +0000 UTC m=+1915.003492564" lastFinishedPulling="2025-12-05 11:39:43.004153701 +0000 UTC m=+1917.146276414" observedRunningTime="2025-12-05 11:39:44.019400508 +0000 UTC m=+1918.161523251" watchObservedRunningTime="2025-12-05 11:39:44.033661675 +0000 UTC m=+1918.175784418" Dec 05 11:39:53 crc kubenswrapper[4728]: I1205 11:39:53.052413 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-j552g"] Dec 05 11:39:53 crc kubenswrapper[4728]: I1205 11:39:53.061806 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-j552g"] Dec 05 11:39:54 crc kubenswrapper[4728]: I1205 11:39:54.354666 4728 scope.go:117] "RemoveContainer" containerID="9ec8fcb9abf0c27ff7684aed90530899b9635c169a8bf34693a24b2f503deb1e" Dec 05 11:39:54 crc kubenswrapper[4728]: E1205 11:39:54.356537 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 11:39:54 crc kubenswrapper[4728]: I1205 11:39:54.370639 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9e792568-52ab-4080-bd08-8d6ef2f15ee7" path="/var/lib/kubelet/pods/9e792568-52ab-4080-bd08-8d6ef2f15ee7/volumes" Dec 05 11:39:59 crc kubenswrapper[4728]: I1205 11:39:59.365024 4728 scope.go:117] "RemoveContainer" containerID="266530df784a322eb87fc13e1fd3e4001c9e02cccbe288ba7aaf883c9968a82c" Dec 05 11:39:59 crc kubenswrapper[4728]: I1205 11:39:59.418971 4728 scope.go:117] "RemoveContainer" containerID="b6f2fed6267b0d4efda811159d9e88c10a18e062aded7b1cb2891abeba8f252f" Dec 05 11:39:59 crc kubenswrapper[4728]: I1205 11:39:59.462377 4728 scope.go:117] "RemoveContainer" containerID="fa8cd64a144b422c33122e9305d8132c9a166c3b62525c64f2f2645ea805ef2d" Dec 05 11:39:59 crc kubenswrapper[4728]: I1205 11:39:59.513830 4728 scope.go:117] "RemoveContainer" containerID="03a65b5ee9a21cbc1b78c58eaf7bd51a3afbf8d82414e0138044b5e4a9c34a49" Dec 05 11:39:59 crc kubenswrapper[4728]: I1205 11:39:59.575270 4728 scope.go:117] "RemoveContainer" containerID="148d543b70dcd94ea80e1f5af6e95f88aba4885de7f3365130208afbd2e75992" Dec 05 11:39:59 crc kubenswrapper[4728]: I1205 11:39:59.596613 4728 scope.go:117] "RemoveContainer" containerID="ecf8bdf78ab4f92089659d22261096826d89c943e519d73418dc0d27a92ac7b6" Dec 05 11:39:59 crc kubenswrapper[4728]: I1205 11:39:59.641238 4728 scope.go:117] "RemoveContainer" containerID="8aba3552233c2cc4bfb86d76b28eab73d4bfbc4134750835367c51395c5981d0" Dec 05 11:39:59 crc kubenswrapper[4728]: I1205 11:39:59.664885 4728 scope.go:117] "RemoveContainer" 
containerID="3544b9cfba4163ae7f828a3a06e45900a218e576a528e112093eb7d4515cb940" Dec 05 11:39:59 crc kubenswrapper[4728]: I1205 11:39:59.684142 4728 scope.go:117] "RemoveContainer" containerID="795e8c6a5bd2c0182ab334359b95fb0893f4aa67fcc22a98c49d2116d6e627a7" Dec 05 11:39:59 crc kubenswrapper[4728]: I1205 11:39:59.707461 4728 scope.go:117] "RemoveContainer" containerID="1f2745aa7ff2d682ac614a2a12900c2d5859b7f9b0bae3fed435925e1ee28544" Dec 05 11:39:59 crc kubenswrapper[4728]: I1205 11:39:59.729609 4728 scope.go:117] "RemoveContainer" containerID="838d8f1f4dc964f976e1fc7ee93b6d68917231e4e59f7bd1b57e691a7d301e82" Dec 05 11:40:03 crc kubenswrapper[4728]: I1205 11:40:03.038159 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-sg5mr"] Dec 05 11:40:03 crc kubenswrapper[4728]: I1205 11:40:03.050394 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-sg5mr"] Dec 05 11:40:04 crc kubenswrapper[4728]: I1205 11:40:04.366498 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="198c5a44-d3b2-4afd-b034-d898309e0f42" path="/var/lib/kubelet/pods/198c5a44-d3b2-4afd-b034-d898309e0f42/volumes" Dec 05 11:40:06 crc kubenswrapper[4728]: I1205 11:40:06.035495 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-27gkv"] Dec 05 11:40:06 crc kubenswrapper[4728]: I1205 11:40:06.048352 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-27gkv"] Dec 05 11:40:06 crc kubenswrapper[4728]: I1205 11:40:06.379163 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a984c5b-a1ab-47ab-9acd-998c05072ea1" path="/var/lib/kubelet/pods/1a984c5b-a1ab-47ab-9acd-998c05072ea1/volumes" Dec 05 11:40:09 crc kubenswrapper[4728]: I1205 11:40:09.352056 4728 scope.go:117] "RemoveContainer" containerID="9ec8fcb9abf0c27ff7684aed90530899b9635c169a8bf34693a24b2f503deb1e" Dec 05 11:40:09 crc kubenswrapper[4728]: E1205 11:40:09.352829 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 11:40:21 crc kubenswrapper[4728]: I1205 11:40:21.037615 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-fm2df"] Dec 05 11:40:21 crc kubenswrapper[4728]: I1205 11:40:21.045477 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-fm2df"] Dec 05 11:40:21 crc kubenswrapper[4728]: I1205 11:40:21.352944 4728 scope.go:117] "RemoveContainer" containerID="9ec8fcb9abf0c27ff7684aed90530899b9635c169a8bf34693a24b2f503deb1e" Dec 05 11:40:21 crc kubenswrapper[4728]: E1205 11:40:21.353577 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 11:40:22 crc kubenswrapper[4728]: I1205 11:40:22.371484 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="555e531f-162f-4097-ba36-53b6ddedd6d8" path="/var/lib/kubelet/pods/555e531f-162f-4097-ba36-53b6ddedd6d8/volumes" Dec 05 11:40:25 crc kubenswrapper[4728]: I1205 11:40:25.034072 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-dtppr"] Dec 05 11:40:25 crc kubenswrapper[4728]: I1205 11:40:25.043635 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-dtppr"] Dec 05 11:40:26 crc kubenswrapper[4728]: I1205 11:40:26.370247 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="875f6746-18ef-483c-bbb4-80d7dbe4b1a1" path="/var/lib/kubelet/pods/875f6746-18ef-483c-bbb4-80d7dbe4b1a1/volumes" Dec 05 11:40:33 crc kubenswrapper[4728]: I1205 11:40:33.352679 4728 scope.go:117] "RemoveContainer" containerID="9ec8fcb9abf0c27ff7684aed90530899b9635c169a8bf34693a24b2f503deb1e" Dec 05 11:40:33 crc kubenswrapper[4728]: I1205 11:40:33.597995 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" event={"ID":"95bfa60b-fcb6-4519-abc5-c25fea50921d","Type":"ContainerStarted","Data":"d840a321c62d9c3d3f8b0627cf00f77cef26db26853951536c13412819ea8b84"} Dec 05 11:40:47 crc kubenswrapper[4728]: I1205 11:40:47.054888 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-db-sync-zbbv6"] Dec 05 11:40:47 crc kubenswrapper[4728]: I1205 11:40:47.066871 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-db-sync-zbbv6"] Dec 05 11:40:48 crc kubenswrapper[4728]: I1205 11:40:48.374398 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="537c7276-c2c9-4427-9b2b-5e835e3bc2d7" path="/var/lib/kubelet/pods/537c7276-c2c9-4427-9b2b-5e835e3bc2d7/volumes" Dec 05 11:40:55 crc kubenswrapper[4728]: I1205 11:40:55.875204 4728 generic.go:334] "Generic (PLEG): container finished" podID="96cca126-d9b0-4c1c-93d8-63872e4a5e1c" containerID="fbc497485928c5e2dcd3e006193bd34ff42f263dc5aca3e59eddb4d49f827755" exitCode=0 Dec 05 11:40:55 crc kubenswrapper[4728]: I1205 11:40:55.875333 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-24vs6" event={"ID":"96cca126-d9b0-4c1c-93d8-63872e4a5e1c","Type":"ContainerDied","Data":"fbc497485928c5e2dcd3e006193bd34ff42f263dc5aca3e59eddb4d49f827755"} Dec 05 11:40:57 crc kubenswrapper[4728]: I1205 11:40:57.385022 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-24vs6" Dec 05 11:40:57 crc kubenswrapper[4728]: I1205 11:40:57.450526 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/96cca126-d9b0-4c1c-93d8-63872e4a5e1c-inventory\") pod \"96cca126-d9b0-4c1c-93d8-63872e4a5e1c\" (UID: \"96cca126-d9b0-4c1c-93d8-63872e4a5e1c\") " Dec 05 11:40:57 crc kubenswrapper[4728]: I1205 11:40:57.450875 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sdgv4\" (UniqueName: \"kubernetes.io/projected/96cca126-d9b0-4c1c-93d8-63872e4a5e1c-kube-api-access-sdgv4\") pod \"96cca126-d9b0-4c1c-93d8-63872e4a5e1c\" (UID: \"96cca126-d9b0-4c1c-93d8-63872e4a5e1c\") " Dec 05 11:40:57 crc kubenswrapper[4728]: I1205 11:40:57.451061 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/96cca126-d9b0-4c1c-93d8-63872e4a5e1c-ssh-key\") pod \"96cca126-d9b0-4c1c-93d8-63872e4a5e1c\" (UID: \"96cca126-d9b0-4c1c-93d8-63872e4a5e1c\") " Dec 05 11:40:57 crc kubenswrapper[4728]: I1205 11:40:57.457660 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96cca126-d9b0-4c1c-93d8-63872e4a5e1c-kube-api-access-sdgv4" (OuterVolumeSpecName: "kube-api-access-sdgv4") pod "96cca126-d9b0-4c1c-93d8-63872e4a5e1c" (UID: "96cca126-d9b0-4c1c-93d8-63872e4a5e1c"). InnerVolumeSpecName "kube-api-access-sdgv4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:40:57 crc kubenswrapper[4728]: I1205 11:40:57.492928 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96cca126-d9b0-4c1c-93d8-63872e4a5e1c-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "96cca126-d9b0-4c1c-93d8-63872e4a5e1c" (UID: "96cca126-d9b0-4c1c-93d8-63872e4a5e1c"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:40:57 crc kubenswrapper[4728]: I1205 11:40:57.493418 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96cca126-d9b0-4c1c-93d8-63872e4a5e1c-inventory" (OuterVolumeSpecName: "inventory") pod "96cca126-d9b0-4c1c-93d8-63872e4a5e1c" (UID: "96cca126-d9b0-4c1c-93d8-63872e4a5e1c"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:40:57 crc kubenswrapper[4728]: I1205 11:40:57.554102 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sdgv4\" (UniqueName: \"kubernetes.io/projected/96cca126-d9b0-4c1c-93d8-63872e4a5e1c-kube-api-access-sdgv4\") on node \"crc\" DevicePath \"\"" Dec 05 11:40:57 crc kubenswrapper[4728]: I1205 11:40:57.554135 4728 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/96cca126-d9b0-4c1c-93d8-63872e4a5e1c-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 11:40:57 crc kubenswrapper[4728]: I1205 11:40:57.554145 4728 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/96cca126-d9b0-4c1c-93d8-63872e4a5e1c-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 11:40:57 crc kubenswrapper[4728]: I1205 11:40:57.895869 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-24vs6" event={"ID":"96cca126-d9b0-4c1c-93d8-63872e4a5e1c","Type":"ContainerDied","Data":"c467c3a599692836eaab1269162244f7d81ee47006a2e7aec4dee3012e29e7a7"} Dec 05 11:40:57 crc kubenswrapper[4728]: I1205 11:40:57.896114 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c467c3a599692836eaab1269162244f7d81ee47006a2e7aec4dee3012e29e7a7" Dec 05 11:40:57 crc kubenswrapper[4728]: I1205 11:40:57.895928 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-24vs6" Dec 05 11:40:57 crc kubenswrapper[4728]: I1205 11:40:57.982846 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-w2kxm"] Dec 05 11:40:57 crc kubenswrapper[4728]: E1205 11:40:57.983300 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96cca126-d9b0-4c1c-93d8-63872e4a5e1c" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 05 11:40:57 crc kubenswrapper[4728]: I1205 11:40:57.983323 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="96cca126-d9b0-4c1c-93d8-63872e4a5e1c" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 05 11:40:57 crc kubenswrapper[4728]: I1205 11:40:57.983568 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="96cca126-d9b0-4c1c-93d8-63872e4a5e1c" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 05 11:40:57 crc kubenswrapper[4728]: I1205 11:40:57.984422 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-w2kxm" Dec 05 11:40:57 crc kubenswrapper[4728]: I1205 11:40:57.990526 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 11:40:57 crc kubenswrapper[4728]: I1205 11:40:57.991046 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b4kzh" Dec 05 11:40:57 crc kubenswrapper[4728]: I1205 11:40:57.991053 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 11:40:57 crc kubenswrapper[4728]: I1205 11:40:57.991410 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 11:40:57 crc kubenswrapper[4728]: I1205 11:40:57.999652 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-w2kxm"] Dec 05 11:40:58 crc kubenswrapper[4728]: I1205 11:40:58.069314 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8ea93fe3-4992-4d62-b2c2-f67ca4763c75-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-w2kxm\" (UID: \"8ea93fe3-4992-4d62-b2c2-f67ca4763c75\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-w2kxm" Dec 05 11:40:58 crc kubenswrapper[4728]: I1205 11:40:58.069526 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tvpdd\" (UniqueName: \"kubernetes.io/projected/8ea93fe3-4992-4d62-b2c2-f67ca4763c75-kube-api-access-tvpdd\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-w2kxm\" (UID: \"8ea93fe3-4992-4d62-b2c2-f67ca4763c75\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-w2kxm" Dec 05 11:40:58 crc kubenswrapper[4728]: I1205 11:40:58.069611 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8ea93fe3-4992-4d62-b2c2-f67ca4763c75-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-w2kxm\" (UID: \"8ea93fe3-4992-4d62-b2c2-f67ca4763c75\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-w2kxm" Dec 05 11:40:58 crc kubenswrapper[4728]: I1205 11:40:58.172411 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8ea93fe3-4992-4d62-b2c2-f67ca4763c75-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-w2kxm\" (UID: \"8ea93fe3-4992-4d62-b2c2-f67ca4763c75\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-w2kxm" Dec 05 11:40:58 crc kubenswrapper[4728]: I1205 11:40:58.172530 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tvpdd\" (UniqueName: \"kubernetes.io/projected/8ea93fe3-4992-4d62-b2c2-f67ca4763c75-kube-api-access-tvpdd\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-w2kxm\" (UID: \"8ea93fe3-4992-4d62-b2c2-f67ca4763c75\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-w2kxm" Dec 05 11:40:58 crc kubenswrapper[4728]: I1205 11:40:58.172585 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8ea93fe3-4992-4d62-b2c2-f67ca4763c75-ssh-key\") pod 
\"validate-network-edpm-deployment-openstack-edpm-ipam-w2kxm\" (UID: \"8ea93fe3-4992-4d62-b2c2-f67ca4763c75\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-w2kxm" Dec 05 11:40:58 crc kubenswrapper[4728]: I1205 11:40:58.176896 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8ea93fe3-4992-4d62-b2c2-f67ca4763c75-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-w2kxm\" (UID: \"8ea93fe3-4992-4d62-b2c2-f67ca4763c75\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-w2kxm" Dec 05 11:40:58 crc kubenswrapper[4728]: I1205 11:40:58.186403 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8ea93fe3-4992-4d62-b2c2-f67ca4763c75-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-w2kxm\" (UID: \"8ea93fe3-4992-4d62-b2c2-f67ca4763c75\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-w2kxm" Dec 05 11:40:58 crc kubenswrapper[4728]: I1205 11:40:58.188415 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tvpdd\" (UniqueName: \"kubernetes.io/projected/8ea93fe3-4992-4d62-b2c2-f67ca4763c75-kube-api-access-tvpdd\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-w2kxm\" (UID: \"8ea93fe3-4992-4d62-b2c2-f67ca4763c75\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-w2kxm" Dec 05 11:40:58 crc kubenswrapper[4728]: I1205 11:40:58.349105 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-w2kxm" Dec 05 11:40:59 crc kubenswrapper[4728]: I1205 11:40:59.016731 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-w2kxm"] Dec 05 11:40:59 crc kubenswrapper[4728]: I1205 11:40:59.924292 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-w2kxm" event={"ID":"8ea93fe3-4992-4d62-b2c2-f67ca4763c75","Type":"ContainerStarted","Data":"e6d4b5264e9bb73ca48b880069425e951057782e961ff20380047190851ce4bc"} Dec 05 11:40:59 crc kubenswrapper[4728]: I1205 11:40:59.924628 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-w2kxm" event={"ID":"8ea93fe3-4992-4d62-b2c2-f67ca4763c75","Type":"ContainerStarted","Data":"e81428dc1dbd0767bbe3d9168dde9204a055636a065a0f138940e43d0645e7e2"} Dec 05 11:40:59 crc kubenswrapper[4728]: I1205 11:40:59.925299 4728 scope.go:117] "RemoveContainer" containerID="b9647a262dee9f754f4071a8e491ac32fb1b5b744bbd889caa0be4f71a8b42c9" Dec 05 11:40:59 crc kubenswrapper[4728]: I1205 11:40:59.955652 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-w2kxm" podStartSLOduration=2.337088855 podStartE2EDuration="2.955627478s" podCreationTimestamp="2025-12-05 11:40:57 +0000 UTC" firstStartedPulling="2025-12-05 11:40:59.016536958 +0000 UTC m=+1993.158659651" lastFinishedPulling="2025-12-05 11:40:59.635075581 +0000 UTC m=+1993.777198274" observedRunningTime="2025-12-05 11:40:59.950617682 +0000 UTC m=+1994.092740395" watchObservedRunningTime="2025-12-05 11:40:59.955627478 +0000 UTC m=+1994.097750181" Dec 05 11:40:59 crc kubenswrapper[4728]: I1205 11:40:59.980842 4728 scope.go:117] "RemoveContainer" 
containerID="8ef8f65ad94c428c88af66587e1fa39c7c9a3e85582c08325973ab5fff66cdaa" Dec 05 11:41:00 crc kubenswrapper[4728]: I1205 11:41:00.018902 4728 scope.go:117] "RemoveContainer" containerID="1ca97bc11021d729f13692aab870d70e82f18e3fa12ecc24a41950063813b560" Dec 05 11:41:00 crc kubenswrapper[4728]: I1205 11:41:00.057384 4728 scope.go:117] "RemoveContainer" containerID="abf8576f46b6ec4882d872d4cf1e3cf3478459a0b6aacba5878baf11162ff85b" Dec 05 11:41:00 crc kubenswrapper[4728]: I1205 11:41:00.091845 4728 scope.go:117] "RemoveContainer" containerID="cb05f6ad3f1272fa578ed38c9b80c038868355b7da27c9a78c973da8868ebff3" Dec 05 11:41:04 crc kubenswrapper[4728]: E1205 11:41:04.797006 4728 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8ea93fe3_4992_4d62_b2c2_f67ca4763c75.slice/crio-conmon-e6d4b5264e9bb73ca48b880069425e951057782e961ff20380047190851ce4bc.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8ea93fe3_4992_4d62_b2c2_f67ca4763c75.slice/crio-e6d4b5264e9bb73ca48b880069425e951057782e961ff20380047190851ce4bc.scope\": RecentStats: unable to find data in memory cache]" Dec 05 11:41:04 crc kubenswrapper[4728]: I1205 11:41:04.972889 4728 generic.go:334] "Generic (PLEG): container finished" podID="8ea93fe3-4992-4d62-b2c2-f67ca4763c75" containerID="e6d4b5264e9bb73ca48b880069425e951057782e961ff20380047190851ce4bc" exitCode=0 Dec 05 11:41:04 crc kubenswrapper[4728]: I1205 11:41:04.972959 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-w2kxm" event={"ID":"8ea93fe3-4992-4d62-b2c2-f67ca4763c75","Type":"ContainerDied","Data":"e6d4b5264e9bb73ca48b880069425e951057782e961ff20380047190851ce4bc"} Dec 05 11:41:06 crc kubenswrapper[4728]: I1205 11:41:06.417596 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-w2kxm" Dec 05 11:41:06 crc kubenswrapper[4728]: I1205 11:41:06.560379 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tvpdd\" (UniqueName: \"kubernetes.io/projected/8ea93fe3-4992-4d62-b2c2-f67ca4763c75-kube-api-access-tvpdd\") pod \"8ea93fe3-4992-4d62-b2c2-f67ca4763c75\" (UID: \"8ea93fe3-4992-4d62-b2c2-f67ca4763c75\") " Dec 05 11:41:06 crc kubenswrapper[4728]: I1205 11:41:06.560442 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8ea93fe3-4992-4d62-b2c2-f67ca4763c75-inventory\") pod \"8ea93fe3-4992-4d62-b2c2-f67ca4763c75\" (UID: \"8ea93fe3-4992-4d62-b2c2-f67ca4763c75\") " Dec 05 11:41:06 crc kubenswrapper[4728]: I1205 11:41:06.560596 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8ea93fe3-4992-4d62-b2c2-f67ca4763c75-ssh-key\") pod \"8ea93fe3-4992-4d62-b2c2-f67ca4763c75\" (UID: \"8ea93fe3-4992-4d62-b2c2-f67ca4763c75\") " Dec 05 11:41:06 crc kubenswrapper[4728]: I1205 11:41:06.565818 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ea93fe3-4992-4d62-b2c2-f67ca4763c75-kube-api-access-tvpdd" (OuterVolumeSpecName: "kube-api-access-tvpdd") pod "8ea93fe3-4992-4d62-b2c2-f67ca4763c75" (UID: "8ea93fe3-4992-4d62-b2c2-f67ca4763c75"). InnerVolumeSpecName "kube-api-access-tvpdd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:41:06 crc kubenswrapper[4728]: I1205 11:41:06.591408 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ea93fe3-4992-4d62-b2c2-f67ca4763c75-inventory" (OuterVolumeSpecName: "inventory") pod "8ea93fe3-4992-4d62-b2c2-f67ca4763c75" (UID: "8ea93fe3-4992-4d62-b2c2-f67ca4763c75"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:41:06 crc kubenswrapper[4728]: I1205 11:41:06.594923 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ea93fe3-4992-4d62-b2c2-f67ca4763c75-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "8ea93fe3-4992-4d62-b2c2-f67ca4763c75" (UID: "8ea93fe3-4992-4d62-b2c2-f67ca4763c75"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:41:06 crc kubenswrapper[4728]: I1205 11:41:06.665546 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tvpdd\" (UniqueName: \"kubernetes.io/projected/8ea93fe3-4992-4d62-b2c2-f67ca4763c75-kube-api-access-tvpdd\") on node \"crc\" DevicePath \"\"" Dec 05 11:41:06 crc kubenswrapper[4728]: I1205 11:41:06.665587 4728 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8ea93fe3-4992-4d62-b2c2-f67ca4763c75-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 11:41:06 crc kubenswrapper[4728]: I1205 11:41:06.665598 4728 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8ea93fe3-4992-4d62-b2c2-f67ca4763c75-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 11:41:06 crc kubenswrapper[4728]: I1205 11:41:06.997324 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-w2kxm" event={"ID":"8ea93fe3-4992-4d62-b2c2-f67ca4763c75","Type":"ContainerDied","Data":"e81428dc1dbd0767bbe3d9168dde9204a055636a065a0f138940e43d0645e7e2"} Dec 05 11:41:06 crc kubenswrapper[4728]: I1205 11:41:06.997706 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e81428dc1dbd0767bbe3d9168dde9204a055636a065a0f138940e43d0645e7e2" Dec 05 11:41:06 crc kubenswrapper[4728]: I1205 11:41:06.997368 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-w2kxm" Dec 05 11:41:07 crc kubenswrapper[4728]: I1205 11:41:07.143203 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-5jh58"] Dec 05 11:41:07 crc kubenswrapper[4728]: E1205 11:41:07.143583 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ea93fe3-4992-4d62-b2c2-f67ca4763c75" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 05 11:41:07 crc kubenswrapper[4728]: I1205 11:41:07.143598 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ea93fe3-4992-4d62-b2c2-f67ca4763c75" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 05 11:41:07 crc kubenswrapper[4728]: I1205 11:41:07.143819 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ea93fe3-4992-4d62-b2c2-f67ca4763c75" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 05 11:41:07 crc kubenswrapper[4728]: I1205 11:41:07.144385 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-5jh58" Dec 05 11:41:07 crc kubenswrapper[4728]: I1205 11:41:07.149193 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b4kzh" Dec 05 11:41:07 crc kubenswrapper[4728]: I1205 11:41:07.149288 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 11:41:07 crc kubenswrapper[4728]: I1205 11:41:07.150015 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 11:41:07 crc kubenswrapper[4728]: I1205 11:41:07.150495 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 11:41:07 crc kubenswrapper[4728]: I1205 11:41:07.160684 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-5jh58"] Dec 05 11:41:07 crc kubenswrapper[4728]: I1205 11:41:07.175054 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dcztn\" (UniqueName: \"kubernetes.io/projected/d2794a25-aa06-4146-957e-5438b4005382-kube-api-access-dcztn\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-5jh58\" (UID: \"d2794a25-aa06-4146-957e-5438b4005382\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-5jh58" Dec 05 11:41:07 crc kubenswrapper[4728]: I1205 11:41:07.175172 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d2794a25-aa06-4146-957e-5438b4005382-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-5jh58\" (UID: \"d2794a25-aa06-4146-957e-5438b4005382\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-5jh58" Dec 05 11:41:07 crc kubenswrapper[4728]: I1205 11:41:07.175226 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d2794a25-aa06-4146-957e-5438b4005382-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-5jh58\" (UID: \"d2794a25-aa06-4146-957e-5438b4005382\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-5jh58" Dec 05 11:41:07 crc kubenswrapper[4728]: I1205 11:41:07.277384 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dcztn\" (UniqueName: \"kubernetes.io/projected/d2794a25-aa06-4146-957e-5438b4005382-kube-api-access-dcztn\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-5jh58\" (UID: \"d2794a25-aa06-4146-957e-5438b4005382\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-5jh58" Dec 05 11:41:07 crc kubenswrapper[4728]: I1205 11:41:07.277466 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d2794a25-aa06-4146-957e-5438b4005382-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-5jh58\" (UID: \"d2794a25-aa06-4146-957e-5438b4005382\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-5jh58" Dec 05 11:41:07 crc kubenswrapper[4728]: I1205 11:41:07.277499 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d2794a25-aa06-4146-957e-5438b4005382-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-5jh58\" (UID: 
\"d2794a25-aa06-4146-957e-5438b4005382\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-5jh58" Dec 05 11:41:07 crc kubenswrapper[4728]: I1205 11:41:07.281264 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d2794a25-aa06-4146-957e-5438b4005382-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-5jh58\" (UID: \"d2794a25-aa06-4146-957e-5438b4005382\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-5jh58" Dec 05 11:41:07 crc kubenswrapper[4728]: I1205 11:41:07.283550 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d2794a25-aa06-4146-957e-5438b4005382-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-5jh58\" (UID: \"d2794a25-aa06-4146-957e-5438b4005382\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-5jh58" Dec 05 11:41:07 crc kubenswrapper[4728]: I1205 11:41:07.297269 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dcztn\" (UniqueName: \"kubernetes.io/projected/d2794a25-aa06-4146-957e-5438b4005382-kube-api-access-dcztn\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-5jh58\" (UID: \"d2794a25-aa06-4146-957e-5438b4005382\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-5jh58" Dec 05 11:41:07 crc kubenswrapper[4728]: I1205 11:41:07.464400 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-5jh58" Dec 05 11:41:08 crc kubenswrapper[4728]: I1205 11:41:08.055767 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-5jh58"] Dec 05 11:41:09 crc kubenswrapper[4728]: I1205 11:41:09.017119 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-5jh58" event={"ID":"d2794a25-aa06-4146-957e-5438b4005382","Type":"ContainerStarted","Data":"be97e73f45fa6e2cc8615aa3ecfa315266e9c6b71a40cd0ad868f76cb26ab6f4"} Dec 05 11:41:09 crc kubenswrapper[4728]: I1205 11:41:09.017676 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-5jh58" event={"ID":"d2794a25-aa06-4146-957e-5438b4005382","Type":"ContainerStarted","Data":"83557e2b6c37395cab8b8901a0c2d33e266dba9326eee23aa1a29d11133e8d17"} Dec 05 11:41:09 crc kubenswrapper[4728]: I1205 11:41:09.039836 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-5jh58" podStartSLOduration=1.6303045680000001 podStartE2EDuration="2.039813499s" podCreationTimestamp="2025-12-05 11:41:07 +0000 UTC" firstStartedPulling="2025-12-05 11:41:08.071010113 +0000 UTC m=+2002.213132806" lastFinishedPulling="2025-12-05 11:41:08.480519044 +0000 UTC m=+2002.622641737" observedRunningTime="2025-12-05 11:41:09.030495466 +0000 UTC m=+2003.172618159" watchObservedRunningTime="2025-12-05 11:41:09.039813499 +0000 UTC m=+2003.181936212" Dec 05 11:41:11 crc kubenswrapper[4728]: I1205 11:41:11.055617 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-wqpdw"] Dec 05 11:41:11 crc kubenswrapper[4728]: I1205 11:41:11.068186 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-wqpdw"] Dec 05 11:41:12 crc kubenswrapper[4728]: I1205 11:41:12.039090 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/nova-cell0-5bbb-account-create-update-2xrjf"] Dec 05 11:41:12 crc kubenswrapper[4728]: I1205 11:41:12.056345 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-5bbb-account-create-update-2xrjf"] Dec 05 11:41:12 crc kubenswrapper[4728]: I1205 11:41:12.364419 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="29402829-a7a8-4c70-b9d4-b0301b97ab76" path="/var/lib/kubelet/pods/29402829-a7a8-4c70-b9d4-b0301b97ab76/volumes" Dec 05 11:41:12 crc kubenswrapper[4728]: I1205 11:41:12.365303 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="59f02d46-5d39-4908-bde8-f957fd7eb940" path="/var/lib/kubelet/pods/59f02d46-5d39-4908-bde8-f957fd7eb940/volumes" Dec 05 11:41:13 crc kubenswrapper[4728]: I1205 11:41:13.031131 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-1044-account-create-update-drt9p"] Dec 05 11:41:13 crc kubenswrapper[4728]: I1205 11:41:13.042030 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-vs4fm"] Dec 05 11:41:13 crc kubenswrapper[4728]: I1205 11:41:13.052051 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-279f-account-create-update-mzbwz"] Dec 05 11:41:13 crc kubenswrapper[4728]: I1205 11:41:13.061375 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-z59x7"] Dec 05 11:41:13 crc kubenswrapper[4728]: I1205 11:41:13.069788 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-1044-account-create-update-drt9p"] Dec 05 11:41:13 crc kubenswrapper[4728]: I1205 11:41:13.077461 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-vs4fm"] Dec 05 11:41:13 crc kubenswrapper[4728]: I1205 11:41:13.085063 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-z59x7"] Dec 05 11:41:13 crc kubenswrapper[4728]: I1205 11:41:13.093169 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-279f-account-create-update-mzbwz"] Dec 05 11:41:14 crc kubenswrapper[4728]: I1205 11:41:14.364253 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0c0925d7-2534-4aa8-8d4b-8dee7ab50f8d" path="/var/lib/kubelet/pods/0c0925d7-2534-4aa8-8d4b-8dee7ab50f8d/volumes" Dec 05 11:41:14 crc kubenswrapper[4728]: I1205 11:41:14.365315 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="29d916ac-a71a-454f-bc05-39b8426b4e64" path="/var/lib/kubelet/pods/29d916ac-a71a-454f-bc05-39b8426b4e64/volumes" Dec 05 11:41:14 crc kubenswrapper[4728]: I1205 11:41:14.365851 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ba6ad75b-eef0-4bd1-b008-0661eabd1bc4" path="/var/lib/kubelet/pods/ba6ad75b-eef0-4bd1-b008-0661eabd1bc4/volumes" Dec 05 11:41:14 crc kubenswrapper[4728]: I1205 11:41:14.366359 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf847146-79d3-4259-9e19-f78f94b25dfa" path="/var/lib/kubelet/pods/cf847146-79d3-4259-9e19-f78f94b25dfa/volumes" Dec 05 11:41:40 crc kubenswrapper[4728]: I1205 11:41:40.027442 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-9kplx"] Dec 05 11:41:40 crc kubenswrapper[4728]: I1205 11:41:40.036663 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-9kplx"] Dec 05 11:41:40 crc kubenswrapper[4728]: I1205 11:41:40.363859 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod 
volumes dir" podUID="520798b6-b938-403f-adc6-5609f6bcfd72" path="/var/lib/kubelet/pods/520798b6-b938-403f-adc6-5609f6bcfd72/volumes" Dec 05 11:41:45 crc kubenswrapper[4728]: E1205 11:41:45.940053 4728 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd2794a25_aa06_4146_957e_5438b4005382.slice/crio-be97e73f45fa6e2cc8615aa3ecfa315266e9c6b71a40cd0ad868f76cb26ab6f4.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd2794a25_aa06_4146_957e_5438b4005382.slice/crio-conmon-be97e73f45fa6e2cc8615aa3ecfa315266e9c6b71a40cd0ad868f76cb26ab6f4.scope\": RecentStats: unable to find data in memory cache]" Dec 05 11:41:46 crc kubenswrapper[4728]: I1205 11:41:46.359133 4728 generic.go:334] "Generic (PLEG): container finished" podID="d2794a25-aa06-4146-957e-5438b4005382" containerID="be97e73f45fa6e2cc8615aa3ecfa315266e9c6b71a40cd0ad868f76cb26ab6f4" exitCode=0 Dec 05 11:41:46 crc kubenswrapper[4728]: I1205 11:41:46.370749 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-5jh58" event={"ID":"d2794a25-aa06-4146-957e-5438b4005382","Type":"ContainerDied","Data":"be97e73f45fa6e2cc8615aa3ecfa315266e9c6b71a40cd0ad868f76cb26ab6f4"} Dec 05 11:41:47 crc kubenswrapper[4728]: I1205 11:41:47.839683 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-5jh58" Dec 05 11:41:47 crc kubenswrapper[4728]: I1205 11:41:47.920328 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dcztn\" (UniqueName: \"kubernetes.io/projected/d2794a25-aa06-4146-957e-5438b4005382-kube-api-access-dcztn\") pod \"d2794a25-aa06-4146-957e-5438b4005382\" (UID: \"d2794a25-aa06-4146-957e-5438b4005382\") " Dec 05 11:41:47 crc kubenswrapper[4728]: I1205 11:41:47.920519 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d2794a25-aa06-4146-957e-5438b4005382-ssh-key\") pod \"d2794a25-aa06-4146-957e-5438b4005382\" (UID: \"d2794a25-aa06-4146-957e-5438b4005382\") " Dec 05 11:41:47 crc kubenswrapper[4728]: I1205 11:41:47.920729 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d2794a25-aa06-4146-957e-5438b4005382-inventory\") pod \"d2794a25-aa06-4146-957e-5438b4005382\" (UID: \"d2794a25-aa06-4146-957e-5438b4005382\") " Dec 05 11:41:47 crc kubenswrapper[4728]: I1205 11:41:47.927383 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d2794a25-aa06-4146-957e-5438b4005382-kube-api-access-dcztn" (OuterVolumeSpecName: "kube-api-access-dcztn") pod "d2794a25-aa06-4146-957e-5438b4005382" (UID: "d2794a25-aa06-4146-957e-5438b4005382"). InnerVolumeSpecName "kube-api-access-dcztn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:41:47 crc kubenswrapper[4728]: I1205 11:41:47.958624 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2794a25-aa06-4146-957e-5438b4005382-inventory" (OuterVolumeSpecName: "inventory") pod "d2794a25-aa06-4146-957e-5438b4005382" (UID: "d2794a25-aa06-4146-957e-5438b4005382"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:41:47 crc kubenswrapper[4728]: I1205 11:41:47.958772 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2794a25-aa06-4146-957e-5438b4005382-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "d2794a25-aa06-4146-957e-5438b4005382" (UID: "d2794a25-aa06-4146-957e-5438b4005382"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:41:48 crc kubenswrapper[4728]: I1205 11:41:48.024153 4728 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d2794a25-aa06-4146-957e-5438b4005382-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 11:41:48 crc kubenswrapper[4728]: I1205 11:41:48.024185 4728 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d2794a25-aa06-4146-957e-5438b4005382-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 11:41:48 crc kubenswrapper[4728]: I1205 11:41:48.024195 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dcztn\" (UniqueName: \"kubernetes.io/projected/d2794a25-aa06-4146-957e-5438b4005382-kube-api-access-dcztn\") on node \"crc\" DevicePath \"\"" Dec 05 11:41:48 crc kubenswrapper[4728]: I1205 11:41:48.383306 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-5jh58" event={"ID":"d2794a25-aa06-4146-957e-5438b4005382","Type":"ContainerDied","Data":"83557e2b6c37395cab8b8901a0c2d33e266dba9326eee23aa1a29d11133e8d17"} Dec 05 11:41:48 crc kubenswrapper[4728]: I1205 11:41:48.383346 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="83557e2b6c37395cab8b8901a0c2d33e266dba9326eee23aa1a29d11133e8d17" Dec 05 11:41:48 crc kubenswrapper[4728]: I1205 11:41:48.383407 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-5jh58" Dec 05 11:41:48 crc kubenswrapper[4728]: I1205 11:41:48.549850 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-zsv4q"] Dec 05 11:41:48 crc kubenswrapper[4728]: E1205 11:41:48.550324 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2794a25-aa06-4146-957e-5438b4005382" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 05 11:41:48 crc kubenswrapper[4728]: I1205 11:41:48.550342 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2794a25-aa06-4146-957e-5438b4005382" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 05 11:41:48 crc kubenswrapper[4728]: I1205 11:41:48.550529 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="d2794a25-aa06-4146-957e-5438b4005382" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 05 11:41:48 crc kubenswrapper[4728]: I1205 11:41:48.551232 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-zsv4q" Dec 05 11:41:48 crc kubenswrapper[4728]: I1205 11:41:48.557481 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 11:41:48 crc kubenswrapper[4728]: I1205 11:41:48.557683 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 11:41:48 crc kubenswrapper[4728]: I1205 11:41:48.557885 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 11:41:48 crc kubenswrapper[4728]: I1205 11:41:48.558010 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b4kzh" Dec 05 11:41:48 crc kubenswrapper[4728]: I1205 11:41:48.576619 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-zsv4q"] Dec 05 11:41:48 crc kubenswrapper[4728]: I1205 11:41:48.636470 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lvnts\" (UniqueName: \"kubernetes.io/projected/a4231d17-68db-4e1d-b39d-6d3affe3c6a5-kube-api-access-lvnts\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-zsv4q\" (UID: \"a4231d17-68db-4e1d-b39d-6d3affe3c6a5\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-zsv4q" Dec 05 11:41:48 crc kubenswrapper[4728]: I1205 11:41:48.636598 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a4231d17-68db-4e1d-b39d-6d3affe3c6a5-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-zsv4q\" (UID: \"a4231d17-68db-4e1d-b39d-6d3affe3c6a5\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-zsv4q" Dec 05 11:41:48 crc kubenswrapper[4728]: I1205 11:41:48.636940 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a4231d17-68db-4e1d-b39d-6d3affe3c6a5-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-zsv4q\" (UID: \"a4231d17-68db-4e1d-b39d-6d3affe3c6a5\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-zsv4q" Dec 05 11:41:48 crc kubenswrapper[4728]: I1205 11:41:48.739398 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a4231d17-68db-4e1d-b39d-6d3affe3c6a5-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-zsv4q\" (UID: \"a4231d17-68db-4e1d-b39d-6d3affe3c6a5\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-zsv4q" Dec 05 11:41:48 crc kubenswrapper[4728]: I1205 11:41:48.739466 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lvnts\" (UniqueName: \"kubernetes.io/projected/a4231d17-68db-4e1d-b39d-6d3affe3c6a5-kube-api-access-lvnts\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-zsv4q\" (UID: \"a4231d17-68db-4e1d-b39d-6d3affe3c6a5\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-zsv4q" Dec 05 11:41:48 crc kubenswrapper[4728]: I1205 11:41:48.739563 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a4231d17-68db-4e1d-b39d-6d3affe3c6a5-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-zsv4q\" 
(UID: \"a4231d17-68db-4e1d-b39d-6d3affe3c6a5\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-zsv4q" Dec 05 11:41:48 crc kubenswrapper[4728]: I1205 11:41:48.744472 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a4231d17-68db-4e1d-b39d-6d3affe3c6a5-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-zsv4q\" (UID: \"a4231d17-68db-4e1d-b39d-6d3affe3c6a5\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-zsv4q" Dec 05 11:41:48 crc kubenswrapper[4728]: I1205 11:41:48.744830 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a4231d17-68db-4e1d-b39d-6d3affe3c6a5-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-zsv4q\" (UID: \"a4231d17-68db-4e1d-b39d-6d3affe3c6a5\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-zsv4q" Dec 05 11:41:48 crc kubenswrapper[4728]: I1205 11:41:48.760364 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lvnts\" (UniqueName: \"kubernetes.io/projected/a4231d17-68db-4e1d-b39d-6d3affe3c6a5-kube-api-access-lvnts\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-zsv4q\" (UID: \"a4231d17-68db-4e1d-b39d-6d3affe3c6a5\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-zsv4q" Dec 05 11:41:48 crc kubenswrapper[4728]: I1205 11:41:48.883759 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-zsv4q" Dec 05 11:41:49 crc kubenswrapper[4728]: W1205 11:41:49.456632 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda4231d17_68db_4e1d_b39d_6d3affe3c6a5.slice/crio-a5a76d312e6b9a1bed72079cd205c16408645621eaacb22092d42b321d1daa80 WatchSource:0}: Error finding container a5a76d312e6b9a1bed72079cd205c16408645621eaacb22092d42b321d1daa80: Status 404 returned error can't find the container with id a5a76d312e6b9a1bed72079cd205c16408645621eaacb22092d42b321d1daa80 Dec 05 11:41:49 crc kubenswrapper[4728]: I1205 11:41:49.463187 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-zsv4q"] Dec 05 11:41:50 crc kubenswrapper[4728]: I1205 11:41:50.402731 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-zsv4q" event={"ID":"a4231d17-68db-4e1d-b39d-6d3affe3c6a5","Type":"ContainerStarted","Data":"95f765be8995bc2602eb85e572352f21c3a08e1c96ef78277b21e2d372a6e592"} Dec 05 11:41:50 crc kubenswrapper[4728]: I1205 11:41:50.403095 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-zsv4q" event={"ID":"a4231d17-68db-4e1d-b39d-6d3affe3c6a5","Type":"ContainerStarted","Data":"a5a76d312e6b9a1bed72079cd205c16408645621eaacb22092d42b321d1daa80"} Dec 05 11:41:50 crc kubenswrapper[4728]: I1205 11:41:50.419727 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-zsv4q" podStartSLOduration=1.9679411999999998 podStartE2EDuration="2.419705631s" podCreationTimestamp="2025-12-05 11:41:48 +0000 UTC" firstStartedPulling="2025-12-05 11:41:49.458931461 +0000 UTC m=+2043.601054164" lastFinishedPulling="2025-12-05 11:41:49.910695892 +0000 UTC m=+2044.052818595" observedRunningTime="2025-12-05 
11:41:50.416364354 +0000 UTC m=+2044.558487047" watchObservedRunningTime="2025-12-05 11:41:50.419705631 +0000 UTC m=+2044.561828324" Dec 05 11:42:00 crc kubenswrapper[4728]: I1205 11:42:00.202616 4728 scope.go:117] "RemoveContainer" containerID="2b42f847a28395ea15946363dc65dd757c05238163f8daf33a3faab37cc4c804" Dec 05 11:42:00 crc kubenswrapper[4728]: I1205 11:42:00.254505 4728 scope.go:117] "RemoveContainer" containerID="0db630bde519961cae74cf61e657588211f101ea3a922fd09ab35117aaacdc41" Dec 05 11:42:00 crc kubenswrapper[4728]: I1205 11:42:00.280615 4728 scope.go:117] "RemoveContainer" containerID="21cd52edcf368b3b529b286788745601d93b95e413f147a661c2a85aade0ac18" Dec 05 11:42:00 crc kubenswrapper[4728]: I1205 11:42:00.333376 4728 scope.go:117] "RemoveContainer" containerID="1ae60a534d591c5e9f0548999c894388089c96a40e93a06ea8ae76d06f356fe2" Dec 05 11:42:00 crc kubenswrapper[4728]: I1205 11:42:00.376899 4728 scope.go:117] "RemoveContainer" containerID="369702e09105a37cc53baf80d353a3f6e3dc4beb40f464c71819a77c8deb8bb1" Dec 05 11:42:00 crc kubenswrapper[4728]: I1205 11:42:00.430343 4728 scope.go:117] "RemoveContainer" containerID="35bc61f1051978215dfb8040ae75392ce02b65cbc988fe46eb74d477d2a26e82" Dec 05 11:42:00 crc kubenswrapper[4728]: I1205 11:42:00.470657 4728 scope.go:117] "RemoveContainer" containerID="4d100edc95d8a92026fc013b9fd5ba0428c198c0950a3c471be595a72ec49716" Dec 05 11:42:04 crc kubenswrapper[4728]: I1205 11:42:04.050878 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-6x7gh"] Dec 05 11:42:04 crc kubenswrapper[4728]: I1205 11:42:04.058751 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-6x7gh"] Dec 05 11:42:04 crc kubenswrapper[4728]: I1205 11:42:04.362116 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e" path="/var/lib/kubelet/pods/bdffaa6a-7be2-40fa-915d-f36ff3a2ad9e/volumes" Dec 05 11:42:05 crc kubenswrapper[4728]: I1205 11:42:05.031964 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-k7g8m"] Dec 05 11:42:05 crc kubenswrapper[4728]: I1205 11:42:05.042934 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-k7g8m"] Dec 05 11:42:06 crc kubenswrapper[4728]: I1205 11:42:06.385598 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3a537dee-6a60-42e4-a928-c43d4db07171" path="/var/lib/kubelet/pods/3a537dee-6a60-42e4-a928-c43d4db07171/volumes" Dec 05 11:42:40 crc kubenswrapper[4728]: I1205 11:42:40.911347 4728 generic.go:334] "Generic (PLEG): container finished" podID="a4231d17-68db-4e1d-b39d-6d3affe3c6a5" containerID="95f765be8995bc2602eb85e572352f21c3a08e1c96ef78277b21e2d372a6e592" exitCode=0 Dec 05 11:42:40 crc kubenswrapper[4728]: I1205 11:42:40.911430 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-zsv4q" event={"ID":"a4231d17-68db-4e1d-b39d-6d3affe3c6a5","Type":"ContainerDied","Data":"95f765be8995bc2602eb85e572352f21c3a08e1c96ef78277b21e2d372a6e592"} Dec 05 11:42:42 crc kubenswrapper[4728]: I1205 11:42:42.402292 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-zsv4q" Dec 05 11:42:42 crc kubenswrapper[4728]: I1205 11:42:42.505424 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a4231d17-68db-4e1d-b39d-6d3affe3c6a5-ssh-key\") pod \"a4231d17-68db-4e1d-b39d-6d3affe3c6a5\" (UID: \"a4231d17-68db-4e1d-b39d-6d3affe3c6a5\") " Dec 05 11:42:42 crc kubenswrapper[4728]: I1205 11:42:42.505484 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a4231d17-68db-4e1d-b39d-6d3affe3c6a5-inventory\") pod \"a4231d17-68db-4e1d-b39d-6d3affe3c6a5\" (UID: \"a4231d17-68db-4e1d-b39d-6d3affe3c6a5\") " Dec 05 11:42:42 crc kubenswrapper[4728]: I1205 11:42:42.505712 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lvnts\" (UniqueName: \"kubernetes.io/projected/a4231d17-68db-4e1d-b39d-6d3affe3c6a5-kube-api-access-lvnts\") pod \"a4231d17-68db-4e1d-b39d-6d3affe3c6a5\" (UID: \"a4231d17-68db-4e1d-b39d-6d3affe3c6a5\") " Dec 05 11:42:42 crc kubenswrapper[4728]: I1205 11:42:42.512990 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a4231d17-68db-4e1d-b39d-6d3affe3c6a5-kube-api-access-lvnts" (OuterVolumeSpecName: "kube-api-access-lvnts") pod "a4231d17-68db-4e1d-b39d-6d3affe3c6a5" (UID: "a4231d17-68db-4e1d-b39d-6d3affe3c6a5"). InnerVolumeSpecName "kube-api-access-lvnts". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:42:42 crc kubenswrapper[4728]: I1205 11:42:42.545619 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4231d17-68db-4e1d-b39d-6d3affe3c6a5-inventory" (OuterVolumeSpecName: "inventory") pod "a4231d17-68db-4e1d-b39d-6d3affe3c6a5" (UID: "a4231d17-68db-4e1d-b39d-6d3affe3c6a5"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:42:42 crc kubenswrapper[4728]: I1205 11:42:42.546006 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4231d17-68db-4e1d-b39d-6d3affe3c6a5-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a4231d17-68db-4e1d-b39d-6d3affe3c6a5" (UID: "a4231d17-68db-4e1d-b39d-6d3affe3c6a5"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:42:42 crc kubenswrapper[4728]: I1205 11:42:42.607522 4728 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a4231d17-68db-4e1d-b39d-6d3affe3c6a5-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 11:42:42 crc kubenswrapper[4728]: I1205 11:42:42.607557 4728 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a4231d17-68db-4e1d-b39d-6d3affe3c6a5-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 11:42:42 crc kubenswrapper[4728]: I1205 11:42:42.607569 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lvnts\" (UniqueName: \"kubernetes.io/projected/a4231d17-68db-4e1d-b39d-6d3affe3c6a5-kube-api-access-lvnts\") on node \"crc\" DevicePath \"\"" Dec 05 11:42:42 crc kubenswrapper[4728]: I1205 11:42:42.929260 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-zsv4q" event={"ID":"a4231d17-68db-4e1d-b39d-6d3affe3c6a5","Type":"ContainerDied","Data":"a5a76d312e6b9a1bed72079cd205c16408645621eaacb22092d42b321d1daa80"} Dec 05 11:42:42 crc kubenswrapper[4728]: I1205 11:42:42.929592 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a5a76d312e6b9a1bed72079cd205c16408645621eaacb22092d42b321d1daa80" Dec 05 11:42:42 crc kubenswrapper[4728]: I1205 11:42:42.929327 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-zsv4q" Dec 05 11:42:43 crc kubenswrapper[4728]: I1205 11:42:43.110009 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-p6mdl"] Dec 05 11:42:43 crc kubenswrapper[4728]: E1205 11:42:43.110573 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4231d17-68db-4e1d-b39d-6d3affe3c6a5" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 05 11:42:43 crc kubenswrapper[4728]: I1205 11:42:43.110608 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4231d17-68db-4e1d-b39d-6d3affe3c6a5" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 05 11:42:43 crc kubenswrapper[4728]: I1205 11:42:43.110961 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="a4231d17-68db-4e1d-b39d-6d3affe3c6a5" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 05 11:42:43 crc kubenswrapper[4728]: I1205 11:42:43.111952 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-p6mdl" Dec 05 11:42:43 crc kubenswrapper[4728]: I1205 11:42:43.125221 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 11:42:43 crc kubenswrapper[4728]: I1205 11:42:43.125591 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b4kzh" Dec 05 11:42:43 crc kubenswrapper[4728]: I1205 11:42:43.126050 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-p6mdl"] Dec 05 11:42:43 crc kubenswrapper[4728]: I1205 11:42:43.141269 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 11:42:43 crc kubenswrapper[4728]: I1205 11:42:43.141464 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 11:42:43 crc kubenswrapper[4728]: I1205 11:42:43.221832 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/63543ad1-6aa4-4b72-aa6d-4438fad98d08-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-p6mdl\" (UID: \"63543ad1-6aa4-4b72-aa6d-4438fad98d08\") " pod="openstack/ssh-known-hosts-edpm-deployment-p6mdl" Dec 05 11:42:43 crc kubenswrapper[4728]: I1205 11:42:43.221928 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/63543ad1-6aa4-4b72-aa6d-4438fad98d08-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-p6mdl\" (UID: \"63543ad1-6aa4-4b72-aa6d-4438fad98d08\") " pod="openstack/ssh-known-hosts-edpm-deployment-p6mdl" Dec 05 11:42:43 crc kubenswrapper[4728]: I1205 11:42:43.221976 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vwnqj\" (UniqueName: \"kubernetes.io/projected/63543ad1-6aa4-4b72-aa6d-4438fad98d08-kube-api-access-vwnqj\") pod \"ssh-known-hosts-edpm-deployment-p6mdl\" (UID: \"63543ad1-6aa4-4b72-aa6d-4438fad98d08\") " pod="openstack/ssh-known-hosts-edpm-deployment-p6mdl" Dec 05 11:42:43 crc kubenswrapper[4728]: I1205 11:42:43.323618 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vwnqj\" (UniqueName: \"kubernetes.io/projected/63543ad1-6aa4-4b72-aa6d-4438fad98d08-kube-api-access-vwnqj\") pod \"ssh-known-hosts-edpm-deployment-p6mdl\" (UID: \"63543ad1-6aa4-4b72-aa6d-4438fad98d08\") " pod="openstack/ssh-known-hosts-edpm-deployment-p6mdl" Dec 05 11:42:43 crc kubenswrapper[4728]: I1205 11:42:43.323842 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/63543ad1-6aa4-4b72-aa6d-4438fad98d08-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-p6mdl\" (UID: \"63543ad1-6aa4-4b72-aa6d-4438fad98d08\") " pod="openstack/ssh-known-hosts-edpm-deployment-p6mdl" Dec 05 11:42:43 crc kubenswrapper[4728]: I1205 11:42:43.323892 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/63543ad1-6aa4-4b72-aa6d-4438fad98d08-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-p6mdl\" (UID: \"63543ad1-6aa4-4b72-aa6d-4438fad98d08\") " pod="openstack/ssh-known-hosts-edpm-deployment-p6mdl" Dec 05 11:42:43 crc 
kubenswrapper[4728]: I1205 11:42:43.329537 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/63543ad1-6aa4-4b72-aa6d-4438fad98d08-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-p6mdl\" (UID: \"63543ad1-6aa4-4b72-aa6d-4438fad98d08\") " pod="openstack/ssh-known-hosts-edpm-deployment-p6mdl" Dec 05 11:42:43 crc kubenswrapper[4728]: I1205 11:42:43.335441 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/63543ad1-6aa4-4b72-aa6d-4438fad98d08-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-p6mdl\" (UID: \"63543ad1-6aa4-4b72-aa6d-4438fad98d08\") " pod="openstack/ssh-known-hosts-edpm-deployment-p6mdl" Dec 05 11:42:43 crc kubenswrapper[4728]: I1205 11:42:43.346740 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vwnqj\" (UniqueName: \"kubernetes.io/projected/63543ad1-6aa4-4b72-aa6d-4438fad98d08-kube-api-access-vwnqj\") pod \"ssh-known-hosts-edpm-deployment-p6mdl\" (UID: \"63543ad1-6aa4-4b72-aa6d-4438fad98d08\") " pod="openstack/ssh-known-hosts-edpm-deployment-p6mdl" Dec 05 11:42:43 crc kubenswrapper[4728]: I1205 11:42:43.456479 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-p6mdl" Dec 05 11:42:43 crc kubenswrapper[4728]: I1205 11:42:43.999075 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-p6mdl"] Dec 05 11:42:44 crc kubenswrapper[4728]: I1205 11:42:44.947957 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-p6mdl" event={"ID":"63543ad1-6aa4-4b72-aa6d-4438fad98d08","Type":"ContainerStarted","Data":"39beeb32e865948deea06fff5599d2d47a42f3f787054dc2eb3de18dda2bef5f"} Dec 05 11:42:44 crc kubenswrapper[4728]: I1205 11:42:44.948212 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-p6mdl" event={"ID":"63543ad1-6aa4-4b72-aa6d-4438fad98d08","Type":"ContainerStarted","Data":"dc3233c45f56c663c52ce6207d7bc567c7583c41b544b91b5740039c651c1c2c"} Dec 05 11:42:44 crc kubenswrapper[4728]: I1205 11:42:44.968730 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-p6mdl" podStartSLOduration=1.527012272 podStartE2EDuration="1.968711595s" podCreationTimestamp="2025-12-05 11:42:43 +0000 UTC" firstStartedPulling="2025-12-05 11:42:44.004517587 +0000 UTC m=+2098.146640280" lastFinishedPulling="2025-12-05 11:42:44.44621691 +0000 UTC m=+2098.588339603" observedRunningTime="2025-12-05 11:42:44.965510203 +0000 UTC m=+2099.107632966" watchObservedRunningTime="2025-12-05 11:42:44.968711595 +0000 UTC m=+2099.110834288" Dec 05 11:42:48 crc kubenswrapper[4728]: I1205 11:42:48.052367 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-xwznt"] Dec 05 11:42:48 crc kubenswrapper[4728]: I1205 11:42:48.061833 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-xwznt"] Dec 05 11:42:48 crc kubenswrapper[4728]: I1205 11:42:48.362414 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0b71e7d-96b0-4ce9-bbb4-bc168495b082" path="/var/lib/kubelet/pods/a0b71e7d-96b0-4ce9-bbb4-bc168495b082/volumes" Dec 05 11:42:52 crc kubenswrapper[4728]: I1205 11:42:52.020076 4728 generic.go:334] "Generic (PLEG): container finished" 
podID="63543ad1-6aa4-4b72-aa6d-4438fad98d08" containerID="39beeb32e865948deea06fff5599d2d47a42f3f787054dc2eb3de18dda2bef5f" exitCode=0 Dec 05 11:42:52 crc kubenswrapper[4728]: I1205 11:42:52.020168 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-p6mdl" event={"ID":"63543ad1-6aa4-4b72-aa6d-4438fad98d08","Type":"ContainerDied","Data":"39beeb32e865948deea06fff5599d2d47a42f3f787054dc2eb3de18dda2bef5f"} Dec 05 11:42:53 crc kubenswrapper[4728]: I1205 11:42:53.474897 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-p6mdl" Dec 05 11:42:53 crc kubenswrapper[4728]: I1205 11:42:53.536728 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/63543ad1-6aa4-4b72-aa6d-4438fad98d08-ssh-key-openstack-edpm-ipam\") pod \"63543ad1-6aa4-4b72-aa6d-4438fad98d08\" (UID: \"63543ad1-6aa4-4b72-aa6d-4438fad98d08\") " Dec 05 11:42:53 crc kubenswrapper[4728]: I1205 11:42:53.537034 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vwnqj\" (UniqueName: \"kubernetes.io/projected/63543ad1-6aa4-4b72-aa6d-4438fad98d08-kube-api-access-vwnqj\") pod \"63543ad1-6aa4-4b72-aa6d-4438fad98d08\" (UID: \"63543ad1-6aa4-4b72-aa6d-4438fad98d08\") " Dec 05 11:42:53 crc kubenswrapper[4728]: I1205 11:42:53.537111 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/63543ad1-6aa4-4b72-aa6d-4438fad98d08-inventory-0\") pod \"63543ad1-6aa4-4b72-aa6d-4438fad98d08\" (UID: \"63543ad1-6aa4-4b72-aa6d-4438fad98d08\") " Dec 05 11:42:53 crc kubenswrapper[4728]: I1205 11:42:53.550010 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/63543ad1-6aa4-4b72-aa6d-4438fad98d08-kube-api-access-vwnqj" (OuterVolumeSpecName: "kube-api-access-vwnqj") pod "63543ad1-6aa4-4b72-aa6d-4438fad98d08" (UID: "63543ad1-6aa4-4b72-aa6d-4438fad98d08"). InnerVolumeSpecName "kube-api-access-vwnqj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:42:53 crc kubenswrapper[4728]: I1205 11:42:53.583861 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63543ad1-6aa4-4b72-aa6d-4438fad98d08-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "63543ad1-6aa4-4b72-aa6d-4438fad98d08" (UID: "63543ad1-6aa4-4b72-aa6d-4438fad98d08"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:42:53 crc kubenswrapper[4728]: I1205 11:42:53.584319 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63543ad1-6aa4-4b72-aa6d-4438fad98d08-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "63543ad1-6aa4-4b72-aa6d-4438fad98d08" (UID: "63543ad1-6aa4-4b72-aa6d-4438fad98d08"). InnerVolumeSpecName "inventory-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:42:53 crc kubenswrapper[4728]: I1205 11:42:53.639573 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vwnqj\" (UniqueName: \"kubernetes.io/projected/63543ad1-6aa4-4b72-aa6d-4438fad98d08-kube-api-access-vwnqj\") on node \"crc\" DevicePath \"\"" Dec 05 11:42:53 crc kubenswrapper[4728]: I1205 11:42:53.639643 4728 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/63543ad1-6aa4-4b72-aa6d-4438fad98d08-inventory-0\") on node \"crc\" DevicePath \"\"" Dec 05 11:42:53 crc kubenswrapper[4728]: I1205 11:42:53.639662 4728 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/63543ad1-6aa4-4b72-aa6d-4438fad98d08-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 05 11:42:54 crc kubenswrapper[4728]: I1205 11:42:54.044204 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-p6mdl" event={"ID":"63543ad1-6aa4-4b72-aa6d-4438fad98d08","Type":"ContainerDied","Data":"dc3233c45f56c663c52ce6207d7bc567c7583c41b544b91b5740039c651c1c2c"} Dec 05 11:42:54 crc kubenswrapper[4728]: I1205 11:42:54.044658 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dc3233c45f56c663c52ce6207d7bc567c7583c41b544b91b5740039c651c1c2c" Dec 05 11:42:54 crc kubenswrapper[4728]: I1205 11:42:54.044273 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-p6mdl" Dec 05 11:42:54 crc kubenswrapper[4728]: I1205 11:42:54.547231 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-7jz8f"] Dec 05 11:42:54 crc kubenswrapper[4728]: E1205 11:42:54.547661 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63543ad1-6aa4-4b72-aa6d-4438fad98d08" containerName="ssh-known-hosts-edpm-deployment" Dec 05 11:42:54 crc kubenswrapper[4728]: I1205 11:42:54.547676 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="63543ad1-6aa4-4b72-aa6d-4438fad98d08" containerName="ssh-known-hosts-edpm-deployment" Dec 05 11:42:54 crc kubenswrapper[4728]: I1205 11:42:54.547947 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="63543ad1-6aa4-4b72-aa6d-4438fad98d08" containerName="ssh-known-hosts-edpm-deployment" Dec 05 11:42:54 crc kubenswrapper[4728]: I1205 11:42:54.548630 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7jz8f" Dec 05 11:42:54 crc kubenswrapper[4728]: I1205 11:42:54.552115 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b4kzh" Dec 05 11:42:54 crc kubenswrapper[4728]: I1205 11:42:54.552418 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 11:42:54 crc kubenswrapper[4728]: I1205 11:42:54.552654 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 11:42:54 crc kubenswrapper[4728]: I1205 11:42:54.552853 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 11:42:54 crc kubenswrapper[4728]: I1205 11:42:54.561375 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-7jz8f"] Dec 05 11:42:54 crc kubenswrapper[4728]: I1205 11:42:54.668746 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/45fff4a4-1d89-41c0-a166-935f921ad8ec-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-7jz8f\" (UID: \"45fff4a4-1d89-41c0-a166-935f921ad8ec\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7jz8f" Dec 05 11:42:54 crc kubenswrapper[4728]: I1205 11:42:54.668916 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nw9xk\" (UniqueName: \"kubernetes.io/projected/45fff4a4-1d89-41c0-a166-935f921ad8ec-kube-api-access-nw9xk\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-7jz8f\" (UID: \"45fff4a4-1d89-41c0-a166-935f921ad8ec\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7jz8f" Dec 05 11:42:54 crc kubenswrapper[4728]: I1205 11:42:54.668953 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/45fff4a4-1d89-41c0-a166-935f921ad8ec-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-7jz8f\" (UID: \"45fff4a4-1d89-41c0-a166-935f921ad8ec\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7jz8f" Dec 05 11:42:54 crc kubenswrapper[4728]: I1205 11:42:54.771168 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nw9xk\" (UniqueName: \"kubernetes.io/projected/45fff4a4-1d89-41c0-a166-935f921ad8ec-kube-api-access-nw9xk\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-7jz8f\" (UID: \"45fff4a4-1d89-41c0-a166-935f921ad8ec\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7jz8f" Dec 05 11:42:54 crc kubenswrapper[4728]: I1205 11:42:54.771225 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/45fff4a4-1d89-41c0-a166-935f921ad8ec-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-7jz8f\" (UID: \"45fff4a4-1d89-41c0-a166-935f921ad8ec\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7jz8f" Dec 05 11:42:54 crc kubenswrapper[4728]: I1205 11:42:54.771351 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/45fff4a4-1d89-41c0-a166-935f921ad8ec-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-7jz8f\" (UID: \"45fff4a4-1d89-41c0-a166-935f921ad8ec\") " 
pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7jz8f" Dec 05 11:42:54 crc kubenswrapper[4728]: I1205 11:42:54.775410 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/45fff4a4-1d89-41c0-a166-935f921ad8ec-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-7jz8f\" (UID: \"45fff4a4-1d89-41c0-a166-935f921ad8ec\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7jz8f" Dec 05 11:42:54 crc kubenswrapper[4728]: I1205 11:42:54.775493 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/45fff4a4-1d89-41c0-a166-935f921ad8ec-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-7jz8f\" (UID: \"45fff4a4-1d89-41c0-a166-935f921ad8ec\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7jz8f" Dec 05 11:42:54 crc kubenswrapper[4728]: I1205 11:42:54.796582 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nw9xk\" (UniqueName: \"kubernetes.io/projected/45fff4a4-1d89-41c0-a166-935f921ad8ec-kube-api-access-nw9xk\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-7jz8f\" (UID: \"45fff4a4-1d89-41c0-a166-935f921ad8ec\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7jz8f" Dec 05 11:42:54 crc kubenswrapper[4728]: I1205 11:42:54.875288 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7jz8f" Dec 05 11:42:55 crc kubenswrapper[4728]: I1205 11:42:55.408690 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-7jz8f"] Dec 05 11:42:55 crc kubenswrapper[4728]: I1205 11:42:55.702267 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 11:42:55 crc kubenswrapper[4728]: I1205 11:42:55.702367 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 11:42:56 crc kubenswrapper[4728]: I1205 11:42:56.064160 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7jz8f" event={"ID":"45fff4a4-1d89-41c0-a166-935f921ad8ec","Type":"ContainerStarted","Data":"483d981bf8cb981b7dfd89decdd9016720a191f26c61939dbdd6a65fb6a49978"} Dec 05 11:42:57 crc kubenswrapper[4728]: I1205 11:42:57.073077 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7jz8f" event={"ID":"45fff4a4-1d89-41c0-a166-935f921ad8ec","Type":"ContainerStarted","Data":"6297705ceeee87090ff680a065d8b345b7481aae70d594abb7443ac35702eda7"} Dec 05 11:42:57 crc kubenswrapper[4728]: I1205 11:42:57.099387 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7jz8f" podStartSLOduration=2.678895256 podStartE2EDuration="3.099370041s" podCreationTimestamp="2025-12-05 11:42:54 +0000 UTC" firstStartedPulling="2025-12-05 11:42:55.416427956 +0000 UTC m=+2109.558550649" lastFinishedPulling="2025-12-05 
11:42:55.836902731 +0000 UTC m=+2109.979025434" observedRunningTime="2025-12-05 11:42:57.092041262 +0000 UTC m=+2111.234163955" watchObservedRunningTime="2025-12-05 11:42:57.099370041 +0000 UTC m=+2111.241492734" Dec 05 11:43:00 crc kubenswrapper[4728]: I1205 11:43:00.598594 4728 scope.go:117] "RemoveContainer" containerID="b1909c12baffefe7a87547b87ac3efff4c8d6a0d7ce4f7abd4a5110d89613dbf" Dec 05 11:43:00 crc kubenswrapper[4728]: I1205 11:43:00.696543 4728 scope.go:117] "RemoveContainer" containerID="2662f7d0df1631a589d833aac1575bcf135fc313919f4dcb1edde8a55a803594" Dec 05 11:43:00 crc kubenswrapper[4728]: I1205 11:43:00.747241 4728 scope.go:117] "RemoveContainer" containerID="bdf98166550c591cad71522435d651aa191745c5dd5cf6e1224e03dc98a3f859" Dec 05 11:43:05 crc kubenswrapper[4728]: I1205 11:43:05.148702 4728 generic.go:334] "Generic (PLEG): container finished" podID="45fff4a4-1d89-41c0-a166-935f921ad8ec" containerID="6297705ceeee87090ff680a065d8b345b7481aae70d594abb7443ac35702eda7" exitCode=0 Dec 05 11:43:05 crc kubenswrapper[4728]: I1205 11:43:05.148820 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7jz8f" event={"ID":"45fff4a4-1d89-41c0-a166-935f921ad8ec","Type":"ContainerDied","Data":"6297705ceeee87090ff680a065d8b345b7481aae70d594abb7443ac35702eda7"} Dec 05 11:43:06 crc kubenswrapper[4728]: I1205 11:43:06.585079 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7jz8f" Dec 05 11:43:06 crc kubenswrapper[4728]: I1205 11:43:06.709647 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/45fff4a4-1d89-41c0-a166-935f921ad8ec-ssh-key\") pod \"45fff4a4-1d89-41c0-a166-935f921ad8ec\" (UID: \"45fff4a4-1d89-41c0-a166-935f921ad8ec\") " Dec 05 11:43:06 crc kubenswrapper[4728]: I1205 11:43:06.709744 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nw9xk\" (UniqueName: \"kubernetes.io/projected/45fff4a4-1d89-41c0-a166-935f921ad8ec-kube-api-access-nw9xk\") pod \"45fff4a4-1d89-41c0-a166-935f921ad8ec\" (UID: \"45fff4a4-1d89-41c0-a166-935f921ad8ec\") " Dec 05 11:43:06 crc kubenswrapper[4728]: I1205 11:43:06.709814 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/45fff4a4-1d89-41c0-a166-935f921ad8ec-inventory\") pod \"45fff4a4-1d89-41c0-a166-935f921ad8ec\" (UID: \"45fff4a4-1d89-41c0-a166-935f921ad8ec\") " Dec 05 11:43:06 crc kubenswrapper[4728]: I1205 11:43:06.745452 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/45fff4a4-1d89-41c0-a166-935f921ad8ec-kube-api-access-nw9xk" (OuterVolumeSpecName: "kube-api-access-nw9xk") pod "45fff4a4-1d89-41c0-a166-935f921ad8ec" (UID: "45fff4a4-1d89-41c0-a166-935f921ad8ec"). InnerVolumeSpecName "kube-api-access-nw9xk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:43:06 crc kubenswrapper[4728]: I1205 11:43:06.776253 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45fff4a4-1d89-41c0-a166-935f921ad8ec-inventory" (OuterVolumeSpecName: "inventory") pod "45fff4a4-1d89-41c0-a166-935f921ad8ec" (UID: "45fff4a4-1d89-41c0-a166-935f921ad8ec"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:43:06 crc kubenswrapper[4728]: I1205 11:43:06.812101 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nw9xk\" (UniqueName: \"kubernetes.io/projected/45fff4a4-1d89-41c0-a166-935f921ad8ec-kube-api-access-nw9xk\") on node \"crc\" DevicePath \"\"" Dec 05 11:43:06 crc kubenswrapper[4728]: I1205 11:43:06.812329 4728 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/45fff4a4-1d89-41c0-a166-935f921ad8ec-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 11:43:06 crc kubenswrapper[4728]: I1205 11:43:06.826102 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45fff4a4-1d89-41c0-a166-935f921ad8ec-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "45fff4a4-1d89-41c0-a166-935f921ad8ec" (UID: "45fff4a4-1d89-41c0-a166-935f921ad8ec"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:43:06 crc kubenswrapper[4728]: I1205 11:43:06.914464 4728 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/45fff4a4-1d89-41c0-a166-935f921ad8ec-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 11:43:07 crc kubenswrapper[4728]: I1205 11:43:07.170023 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7jz8f" event={"ID":"45fff4a4-1d89-41c0-a166-935f921ad8ec","Type":"ContainerDied","Data":"483d981bf8cb981b7dfd89decdd9016720a191f26c61939dbdd6a65fb6a49978"} Dec 05 11:43:07 crc kubenswrapper[4728]: I1205 11:43:07.170472 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="483d981bf8cb981b7dfd89decdd9016720a191f26c61939dbdd6a65fb6a49978" Dec 05 11:43:07 crc kubenswrapper[4728]: I1205 11:43:07.170072 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7jz8f" Dec 05 11:43:07 crc kubenswrapper[4728]: I1205 11:43:07.271837 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-pj5b7"] Dec 05 11:43:07 crc kubenswrapper[4728]: E1205 11:43:07.272226 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45fff4a4-1d89-41c0-a166-935f921ad8ec" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 05 11:43:07 crc kubenswrapper[4728]: I1205 11:43:07.272240 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="45fff4a4-1d89-41c0-a166-935f921ad8ec" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 05 11:43:07 crc kubenswrapper[4728]: I1205 11:43:07.272442 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="45fff4a4-1d89-41c0-a166-935f921ad8ec" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 05 11:43:07 crc kubenswrapper[4728]: I1205 11:43:07.273086 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-pj5b7" Dec 05 11:43:07 crc kubenswrapper[4728]: I1205 11:43:07.275097 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 11:43:07 crc kubenswrapper[4728]: I1205 11:43:07.275868 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b4kzh" Dec 05 11:43:07 crc kubenswrapper[4728]: I1205 11:43:07.275910 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 11:43:07 crc kubenswrapper[4728]: I1205 11:43:07.283172 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-pj5b7"] Dec 05 11:43:07 crc kubenswrapper[4728]: I1205 11:43:07.283269 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 11:43:07 crc kubenswrapper[4728]: I1205 11:43:07.433517 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l62s2\" (UniqueName: \"kubernetes.io/projected/be7ac6c7-643c-42b4-bae5-0eab2ee3aea0-kube-api-access-l62s2\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-pj5b7\" (UID: \"be7ac6c7-643c-42b4-bae5-0eab2ee3aea0\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-pj5b7" Dec 05 11:43:07 crc kubenswrapper[4728]: I1205 11:43:07.433871 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/be7ac6c7-643c-42b4-bae5-0eab2ee3aea0-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-pj5b7\" (UID: \"be7ac6c7-643c-42b4-bae5-0eab2ee3aea0\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-pj5b7" Dec 05 11:43:07 crc kubenswrapper[4728]: I1205 11:43:07.434332 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/be7ac6c7-643c-42b4-bae5-0eab2ee3aea0-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-pj5b7\" (UID: \"be7ac6c7-643c-42b4-bae5-0eab2ee3aea0\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-pj5b7" Dec 05 11:43:07 crc kubenswrapper[4728]: I1205 11:43:07.535731 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/be7ac6c7-643c-42b4-bae5-0eab2ee3aea0-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-pj5b7\" (UID: \"be7ac6c7-643c-42b4-bae5-0eab2ee3aea0\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-pj5b7" Dec 05 11:43:07 crc kubenswrapper[4728]: I1205 11:43:07.535950 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/be7ac6c7-643c-42b4-bae5-0eab2ee3aea0-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-pj5b7\" (UID: \"be7ac6c7-643c-42b4-bae5-0eab2ee3aea0\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-pj5b7" Dec 05 11:43:07 crc kubenswrapper[4728]: I1205 11:43:07.536003 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l62s2\" (UniqueName: \"kubernetes.io/projected/be7ac6c7-643c-42b4-bae5-0eab2ee3aea0-kube-api-access-l62s2\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-pj5b7\" (UID: 
\"be7ac6c7-643c-42b4-bae5-0eab2ee3aea0\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-pj5b7" Dec 05 11:43:07 crc kubenswrapper[4728]: I1205 11:43:07.539654 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/be7ac6c7-643c-42b4-bae5-0eab2ee3aea0-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-pj5b7\" (UID: \"be7ac6c7-643c-42b4-bae5-0eab2ee3aea0\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-pj5b7" Dec 05 11:43:07 crc kubenswrapper[4728]: I1205 11:43:07.542070 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/be7ac6c7-643c-42b4-bae5-0eab2ee3aea0-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-pj5b7\" (UID: \"be7ac6c7-643c-42b4-bae5-0eab2ee3aea0\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-pj5b7" Dec 05 11:43:07 crc kubenswrapper[4728]: I1205 11:43:07.563766 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l62s2\" (UniqueName: \"kubernetes.io/projected/be7ac6c7-643c-42b4-bae5-0eab2ee3aea0-kube-api-access-l62s2\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-pj5b7\" (UID: \"be7ac6c7-643c-42b4-bae5-0eab2ee3aea0\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-pj5b7" Dec 05 11:43:07 crc kubenswrapper[4728]: I1205 11:43:07.602516 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-pj5b7" Dec 05 11:43:08 crc kubenswrapper[4728]: I1205 11:43:08.150065 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-pj5b7"] Dec 05 11:43:08 crc kubenswrapper[4728]: W1205 11:43:08.153863 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbe7ac6c7_643c_42b4_bae5_0eab2ee3aea0.slice/crio-b5fea6211968bcbe4379004742402f16f460fb267478f11de0ae6671de807d04 WatchSource:0}: Error finding container b5fea6211968bcbe4379004742402f16f460fb267478f11de0ae6671de807d04: Status 404 returned error can't find the container with id b5fea6211968bcbe4379004742402f16f460fb267478f11de0ae6671de807d04 Dec 05 11:43:08 crc kubenswrapper[4728]: I1205 11:43:08.157010 4728 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 11:43:08 crc kubenswrapper[4728]: I1205 11:43:08.184160 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-pj5b7" event={"ID":"be7ac6c7-643c-42b4-bae5-0eab2ee3aea0","Type":"ContainerStarted","Data":"b5fea6211968bcbe4379004742402f16f460fb267478f11de0ae6671de807d04"} Dec 05 11:43:08 crc kubenswrapper[4728]: I1205 11:43:08.693425 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-x4dqn"] Dec 05 11:43:08 crc kubenswrapper[4728]: I1205 11:43:08.697741 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-x4dqn" Dec 05 11:43:08 crc kubenswrapper[4728]: I1205 11:43:08.719879 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-x4dqn"] Dec 05 11:43:08 crc kubenswrapper[4728]: I1205 11:43:08.860675 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5lkwr\" (UniqueName: \"kubernetes.io/projected/9cac5ea8-4806-44d2-bd18-9adabcadf8d1-kube-api-access-5lkwr\") pod \"redhat-operators-x4dqn\" (UID: \"9cac5ea8-4806-44d2-bd18-9adabcadf8d1\") " pod="openshift-marketplace/redhat-operators-x4dqn" Dec 05 11:43:08 crc kubenswrapper[4728]: I1205 11:43:08.860730 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9cac5ea8-4806-44d2-bd18-9adabcadf8d1-catalog-content\") pod \"redhat-operators-x4dqn\" (UID: \"9cac5ea8-4806-44d2-bd18-9adabcadf8d1\") " pod="openshift-marketplace/redhat-operators-x4dqn" Dec 05 11:43:08 crc kubenswrapper[4728]: I1205 11:43:08.860996 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9cac5ea8-4806-44d2-bd18-9adabcadf8d1-utilities\") pod \"redhat-operators-x4dqn\" (UID: \"9cac5ea8-4806-44d2-bd18-9adabcadf8d1\") " pod="openshift-marketplace/redhat-operators-x4dqn" Dec 05 11:43:08 crc kubenswrapper[4728]: I1205 11:43:08.963196 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9cac5ea8-4806-44d2-bd18-9adabcadf8d1-utilities\") pod \"redhat-operators-x4dqn\" (UID: \"9cac5ea8-4806-44d2-bd18-9adabcadf8d1\") " pod="openshift-marketplace/redhat-operators-x4dqn" Dec 05 11:43:08 crc kubenswrapper[4728]: I1205 11:43:08.963368 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5lkwr\" (UniqueName: \"kubernetes.io/projected/9cac5ea8-4806-44d2-bd18-9adabcadf8d1-kube-api-access-5lkwr\") pod \"redhat-operators-x4dqn\" (UID: \"9cac5ea8-4806-44d2-bd18-9adabcadf8d1\") " pod="openshift-marketplace/redhat-operators-x4dqn" Dec 05 11:43:08 crc kubenswrapper[4728]: I1205 11:43:08.963394 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9cac5ea8-4806-44d2-bd18-9adabcadf8d1-catalog-content\") pod \"redhat-operators-x4dqn\" (UID: \"9cac5ea8-4806-44d2-bd18-9adabcadf8d1\") " pod="openshift-marketplace/redhat-operators-x4dqn" Dec 05 11:43:08 crc kubenswrapper[4728]: I1205 11:43:08.963658 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9cac5ea8-4806-44d2-bd18-9adabcadf8d1-utilities\") pod \"redhat-operators-x4dqn\" (UID: \"9cac5ea8-4806-44d2-bd18-9adabcadf8d1\") " pod="openshift-marketplace/redhat-operators-x4dqn" Dec 05 11:43:08 crc kubenswrapper[4728]: I1205 11:43:08.963757 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9cac5ea8-4806-44d2-bd18-9adabcadf8d1-catalog-content\") pod \"redhat-operators-x4dqn\" (UID: \"9cac5ea8-4806-44d2-bd18-9adabcadf8d1\") " pod="openshift-marketplace/redhat-operators-x4dqn" Dec 05 11:43:08 crc kubenswrapper[4728]: I1205 11:43:08.984741 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-5lkwr\" (UniqueName: \"kubernetes.io/projected/9cac5ea8-4806-44d2-bd18-9adabcadf8d1-kube-api-access-5lkwr\") pod \"redhat-operators-x4dqn\" (UID: \"9cac5ea8-4806-44d2-bd18-9adabcadf8d1\") " pod="openshift-marketplace/redhat-operators-x4dqn" Dec 05 11:43:09 crc kubenswrapper[4728]: I1205 11:43:09.038141 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-x4dqn" Dec 05 11:43:09 crc kubenswrapper[4728]: I1205 11:43:09.199651 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-pj5b7" event={"ID":"be7ac6c7-643c-42b4-bae5-0eab2ee3aea0","Type":"ContainerStarted","Data":"4c20fc4cf133ddfffe653f9842d0781bcbf93b28df753411db7f710179bf7ef0"} Dec 05 11:43:09 crc kubenswrapper[4728]: I1205 11:43:09.224376 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-pj5b7" podStartSLOduration=1.788924634 podStartE2EDuration="2.224358874s" podCreationTimestamp="2025-12-05 11:43:07 +0000 UTC" firstStartedPulling="2025-12-05 11:43:08.156708238 +0000 UTC m=+2122.298830931" lastFinishedPulling="2025-12-05 11:43:08.592142468 +0000 UTC m=+2122.734265171" observedRunningTime="2025-12-05 11:43:09.219575931 +0000 UTC m=+2123.361698654" watchObservedRunningTime="2025-12-05 11:43:09.224358874 +0000 UTC m=+2123.366481567" Dec 05 11:43:09 crc kubenswrapper[4728]: I1205 11:43:09.570276 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-x4dqn"] Dec 05 11:43:09 crc kubenswrapper[4728]: W1205 11:43:09.581669 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9cac5ea8_4806_44d2_bd18_9adabcadf8d1.slice/crio-40c48f42b0ed24bff3e6d42b3a51df2c7f90095c521083c7d02eca13ddf535c6 WatchSource:0}: Error finding container 40c48f42b0ed24bff3e6d42b3a51df2c7f90095c521083c7d02eca13ddf535c6: Status 404 returned error can't find the container with id 40c48f42b0ed24bff3e6d42b3a51df2c7f90095c521083c7d02eca13ddf535c6 Dec 05 11:43:10 crc kubenswrapper[4728]: I1205 11:43:10.210398 4728 generic.go:334] "Generic (PLEG): container finished" podID="9cac5ea8-4806-44d2-bd18-9adabcadf8d1" containerID="8597a31f48df00963be54c30b8af04d1326f6ac3f29e470ba877b89c4262fc31" exitCode=0 Dec 05 11:43:10 crc kubenswrapper[4728]: I1205 11:43:10.210664 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x4dqn" event={"ID":"9cac5ea8-4806-44d2-bd18-9adabcadf8d1","Type":"ContainerDied","Data":"8597a31f48df00963be54c30b8af04d1326f6ac3f29e470ba877b89c4262fc31"} Dec 05 11:43:10 crc kubenswrapper[4728]: I1205 11:43:10.210719 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x4dqn" event={"ID":"9cac5ea8-4806-44d2-bd18-9adabcadf8d1","Type":"ContainerStarted","Data":"40c48f42b0ed24bff3e6d42b3a51df2c7f90095c521083c7d02eca13ddf535c6"} Dec 05 11:43:11 crc kubenswrapper[4728]: I1205 11:43:11.228526 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x4dqn" event={"ID":"9cac5ea8-4806-44d2-bd18-9adabcadf8d1","Type":"ContainerStarted","Data":"40a72b58a77e06592d647159d8696d8a6681fd45d5ebe263353c1757bf6b5c74"} Dec 05 11:43:14 crc kubenswrapper[4728]: I1205 11:43:14.254565 4728 generic.go:334] "Generic (PLEG): container finished" podID="9cac5ea8-4806-44d2-bd18-9adabcadf8d1" 
containerID="40a72b58a77e06592d647159d8696d8a6681fd45d5ebe263353c1757bf6b5c74" exitCode=0 Dec 05 11:43:14 crc kubenswrapper[4728]: I1205 11:43:14.254758 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x4dqn" event={"ID":"9cac5ea8-4806-44d2-bd18-9adabcadf8d1","Type":"ContainerDied","Data":"40a72b58a77e06592d647159d8696d8a6681fd45d5ebe263353c1757bf6b5c74"} Dec 05 11:43:15 crc kubenswrapper[4728]: I1205 11:43:15.266274 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x4dqn" event={"ID":"9cac5ea8-4806-44d2-bd18-9adabcadf8d1","Type":"ContainerStarted","Data":"6f0a408495a65407851cd8d82eba46382935e533ec80b85601a874ab8f2b78a2"} Dec 05 11:43:15 crc kubenswrapper[4728]: I1205 11:43:15.295641 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-x4dqn" podStartSLOduration=2.505537148 podStartE2EDuration="7.29561864s" podCreationTimestamp="2025-12-05 11:43:08 +0000 UTC" firstStartedPulling="2025-12-05 11:43:10.21249775 +0000 UTC m=+2124.354620443" lastFinishedPulling="2025-12-05 11:43:15.002579242 +0000 UTC m=+2129.144701935" observedRunningTime="2025-12-05 11:43:15.284732579 +0000 UTC m=+2129.426855292" watchObservedRunningTime="2025-12-05 11:43:15.29561864 +0000 UTC m=+2129.437741353" Dec 05 11:43:19 crc kubenswrapper[4728]: I1205 11:43:19.039043 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-x4dqn" Dec 05 11:43:19 crc kubenswrapper[4728]: I1205 11:43:19.039519 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-x4dqn" Dec 05 11:43:19 crc kubenswrapper[4728]: I1205 11:43:19.298549 4728 generic.go:334] "Generic (PLEG): container finished" podID="be7ac6c7-643c-42b4-bae5-0eab2ee3aea0" containerID="4c20fc4cf133ddfffe653f9842d0781bcbf93b28df753411db7f710179bf7ef0" exitCode=0 Dec 05 11:43:19 crc kubenswrapper[4728]: I1205 11:43:19.298604 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-pj5b7" event={"ID":"be7ac6c7-643c-42b4-bae5-0eab2ee3aea0","Type":"ContainerDied","Data":"4c20fc4cf133ddfffe653f9842d0781bcbf93b28df753411db7f710179bf7ef0"} Dec 05 11:43:20 crc kubenswrapper[4728]: I1205 11:43:20.098632 4728 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-x4dqn" podUID="9cac5ea8-4806-44d2-bd18-9adabcadf8d1" containerName="registry-server" probeResult="failure" output=< Dec 05 11:43:20 crc kubenswrapper[4728]: timeout: failed to connect service ":50051" within 1s Dec 05 11:43:20 crc kubenswrapper[4728]: > Dec 05 11:43:20 crc kubenswrapper[4728]: I1205 11:43:20.780177 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-pj5b7" Dec 05 11:43:20 crc kubenswrapper[4728]: I1205 11:43:20.906977 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l62s2\" (UniqueName: \"kubernetes.io/projected/be7ac6c7-643c-42b4-bae5-0eab2ee3aea0-kube-api-access-l62s2\") pod \"be7ac6c7-643c-42b4-bae5-0eab2ee3aea0\" (UID: \"be7ac6c7-643c-42b4-bae5-0eab2ee3aea0\") " Dec 05 11:43:20 crc kubenswrapper[4728]: I1205 11:43:20.907323 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/be7ac6c7-643c-42b4-bae5-0eab2ee3aea0-inventory\") pod \"be7ac6c7-643c-42b4-bae5-0eab2ee3aea0\" (UID: \"be7ac6c7-643c-42b4-bae5-0eab2ee3aea0\") " Dec 05 11:43:20 crc kubenswrapper[4728]: I1205 11:43:20.907537 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/be7ac6c7-643c-42b4-bae5-0eab2ee3aea0-ssh-key\") pod \"be7ac6c7-643c-42b4-bae5-0eab2ee3aea0\" (UID: \"be7ac6c7-643c-42b4-bae5-0eab2ee3aea0\") " Dec 05 11:43:20 crc kubenswrapper[4728]: I1205 11:43:20.929984 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/be7ac6c7-643c-42b4-bae5-0eab2ee3aea0-kube-api-access-l62s2" (OuterVolumeSpecName: "kube-api-access-l62s2") pod "be7ac6c7-643c-42b4-bae5-0eab2ee3aea0" (UID: "be7ac6c7-643c-42b4-bae5-0eab2ee3aea0"). InnerVolumeSpecName "kube-api-access-l62s2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:43:20 crc kubenswrapper[4728]: I1205 11:43:20.936784 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be7ac6c7-643c-42b4-bae5-0eab2ee3aea0-inventory" (OuterVolumeSpecName: "inventory") pod "be7ac6c7-643c-42b4-bae5-0eab2ee3aea0" (UID: "be7ac6c7-643c-42b4-bae5-0eab2ee3aea0"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:43:20 crc kubenswrapper[4728]: I1205 11:43:20.944180 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be7ac6c7-643c-42b4-bae5-0eab2ee3aea0-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "be7ac6c7-643c-42b4-bae5-0eab2ee3aea0" (UID: "be7ac6c7-643c-42b4-bae5-0eab2ee3aea0"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.010590 4728 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/be7ac6c7-643c-42b4-bae5-0eab2ee3aea0-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.010631 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l62s2\" (UniqueName: \"kubernetes.io/projected/be7ac6c7-643c-42b4-bae5-0eab2ee3aea0-kube-api-access-l62s2\") on node \"crc\" DevicePath \"\"" Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.010645 4728 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/be7ac6c7-643c-42b4-bae5-0eab2ee3aea0-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.318900 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-pj5b7" event={"ID":"be7ac6c7-643c-42b4-bae5-0eab2ee3aea0","Type":"ContainerDied","Data":"b5fea6211968bcbe4379004742402f16f460fb267478f11de0ae6671de807d04"} Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.318967 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-pj5b7" Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.318968 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b5fea6211968bcbe4379004742402f16f460fb267478f11de0ae6671de807d04" Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.410002 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wscr4"] Dec 05 11:43:21 crc kubenswrapper[4728]: E1205 11:43:21.410389 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be7ac6c7-643c-42b4-bae5-0eab2ee3aea0" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.410408 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="be7ac6c7-643c-42b4-bae5-0eab2ee3aea0" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.410596 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="be7ac6c7-643c-42b4-bae5-0eab2ee3aea0" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.411334 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wscr4" Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.413442 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b4kzh" Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.413642 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.414125 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0" Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.414220 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.414566 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.414707 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.414859 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.415118 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.431097 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wscr4"] Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.519370 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c307593d-70fb-42ac-987a-9e7639f530c6-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wscr4\" (UID: \"c307593d-70fb-42ac-987a-9e7639f530c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wscr4" Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.519414 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c307593d-70fb-42ac-987a-9e7639f530c6-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wscr4\" (UID: \"c307593d-70fb-42ac-987a-9e7639f530c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wscr4" Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.519629 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c307593d-70fb-42ac-987a-9e7639f530c6-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wscr4\" (UID: \"c307593d-70fb-42ac-987a-9e7639f530c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wscr4" Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.519677 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c307593d-70fb-42ac-987a-9e7639f530c6-ovn-combined-ca-bundle\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-wscr4\" (UID: \"c307593d-70fb-42ac-987a-9e7639f530c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wscr4" Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.519716 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c307593d-70fb-42ac-987a-9e7639f530c6-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wscr4\" (UID: \"c307593d-70fb-42ac-987a-9e7639f530c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wscr4" Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.519880 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c307593d-70fb-42ac-987a-9e7639f530c6-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wscr4\" (UID: \"c307593d-70fb-42ac-987a-9e7639f530c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wscr4" Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.519960 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dpm6r\" (UniqueName: \"kubernetes.io/projected/c307593d-70fb-42ac-987a-9e7639f530c6-kube-api-access-dpm6r\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wscr4\" (UID: \"c307593d-70fb-42ac-987a-9e7639f530c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wscr4" Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.520047 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c307593d-70fb-42ac-987a-9e7639f530c6-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wscr4\" (UID: \"c307593d-70fb-42ac-987a-9e7639f530c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wscr4" Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.520154 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c307593d-70fb-42ac-987a-9e7639f530c6-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wscr4\" (UID: \"c307593d-70fb-42ac-987a-9e7639f530c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wscr4" Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.520250 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c307593d-70fb-42ac-987a-9e7639f530c6-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wscr4\" (UID: \"c307593d-70fb-42ac-987a-9e7639f530c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wscr4" Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.520851 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c307593d-70fb-42ac-987a-9e7639f530c6-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wscr4\" (UID: 
\"c307593d-70fb-42ac-987a-9e7639f530c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wscr4" Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.521157 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c307593d-70fb-42ac-987a-9e7639f530c6-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wscr4\" (UID: \"c307593d-70fb-42ac-987a-9e7639f530c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wscr4" Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.521261 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c307593d-70fb-42ac-987a-9e7639f530c6-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wscr4\" (UID: \"c307593d-70fb-42ac-987a-9e7639f530c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wscr4" Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.521288 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c307593d-70fb-42ac-987a-9e7639f530c6-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wscr4\" (UID: \"c307593d-70fb-42ac-987a-9e7639f530c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wscr4" Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.622511 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c307593d-70fb-42ac-987a-9e7639f530c6-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wscr4\" (UID: \"c307593d-70fb-42ac-987a-9e7639f530c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wscr4" Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.622585 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dpm6r\" (UniqueName: \"kubernetes.io/projected/c307593d-70fb-42ac-987a-9e7639f530c6-kube-api-access-dpm6r\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wscr4\" (UID: \"c307593d-70fb-42ac-987a-9e7639f530c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wscr4" Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.622617 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c307593d-70fb-42ac-987a-9e7639f530c6-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wscr4\" (UID: \"c307593d-70fb-42ac-987a-9e7639f530c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wscr4" Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.622662 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c307593d-70fb-42ac-987a-9e7639f530c6-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wscr4\" (UID: \"c307593d-70fb-42ac-987a-9e7639f530c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wscr4" Dec 05 11:43:21 crc kubenswrapper[4728]: 
I1205 11:43:21.622702 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c307593d-70fb-42ac-987a-9e7639f530c6-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wscr4\" (UID: \"c307593d-70fb-42ac-987a-9e7639f530c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wscr4" Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.622733 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c307593d-70fb-42ac-987a-9e7639f530c6-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wscr4\" (UID: \"c307593d-70fb-42ac-987a-9e7639f530c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wscr4" Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.622762 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c307593d-70fb-42ac-987a-9e7639f530c6-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wscr4\" (UID: \"c307593d-70fb-42ac-987a-9e7639f530c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wscr4" Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.622820 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c307593d-70fb-42ac-987a-9e7639f530c6-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wscr4\" (UID: \"c307593d-70fb-42ac-987a-9e7639f530c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wscr4" Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.622843 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c307593d-70fb-42ac-987a-9e7639f530c6-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wscr4\" (UID: \"c307593d-70fb-42ac-987a-9e7639f530c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wscr4" Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.622865 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c307593d-70fb-42ac-987a-9e7639f530c6-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wscr4\" (UID: \"c307593d-70fb-42ac-987a-9e7639f530c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wscr4" Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.622886 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c307593d-70fb-42ac-987a-9e7639f530c6-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wscr4\" (UID: \"c307593d-70fb-42ac-987a-9e7639f530c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wscr4" Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.623100 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c307593d-70fb-42ac-987a-9e7639f530c6-ovn-combined-ca-bundle\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-wscr4\" (UID: \"c307593d-70fb-42ac-987a-9e7639f530c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wscr4" Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.623134 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c307593d-70fb-42ac-987a-9e7639f530c6-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wscr4\" (UID: \"c307593d-70fb-42ac-987a-9e7639f530c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wscr4" Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.623155 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c307593d-70fb-42ac-987a-9e7639f530c6-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wscr4\" (UID: \"c307593d-70fb-42ac-987a-9e7639f530c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wscr4" Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.636440 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c307593d-70fb-42ac-987a-9e7639f530c6-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wscr4\" (UID: \"c307593d-70fb-42ac-987a-9e7639f530c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wscr4" Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.636808 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c307593d-70fb-42ac-987a-9e7639f530c6-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wscr4\" (UID: \"c307593d-70fb-42ac-987a-9e7639f530c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wscr4" Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.636997 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c307593d-70fb-42ac-987a-9e7639f530c6-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wscr4\" (UID: \"c307593d-70fb-42ac-987a-9e7639f530c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wscr4" Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.638381 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c307593d-70fb-42ac-987a-9e7639f530c6-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wscr4\" (UID: \"c307593d-70fb-42ac-987a-9e7639f530c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wscr4" Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.639220 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c307593d-70fb-42ac-987a-9e7639f530c6-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wscr4\" (UID: \"c307593d-70fb-42ac-987a-9e7639f530c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wscr4" Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.639430 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/c307593d-70fb-42ac-987a-9e7639f530c6-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wscr4\" (UID: \"c307593d-70fb-42ac-987a-9e7639f530c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wscr4" Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.639585 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c307593d-70fb-42ac-987a-9e7639f530c6-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wscr4\" (UID: \"c307593d-70fb-42ac-987a-9e7639f530c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wscr4" Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.639783 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c307593d-70fb-42ac-987a-9e7639f530c6-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wscr4\" (UID: \"c307593d-70fb-42ac-987a-9e7639f530c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wscr4" Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.639979 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c307593d-70fb-42ac-987a-9e7639f530c6-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wscr4\" (UID: \"c307593d-70fb-42ac-987a-9e7639f530c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wscr4" Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.640661 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c307593d-70fb-42ac-987a-9e7639f530c6-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wscr4\" (UID: \"c307593d-70fb-42ac-987a-9e7639f530c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wscr4" Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.641981 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c307593d-70fb-42ac-987a-9e7639f530c6-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wscr4\" (UID: \"c307593d-70fb-42ac-987a-9e7639f530c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wscr4" Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.642499 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c307593d-70fb-42ac-987a-9e7639f530c6-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wscr4\" (UID: \"c307593d-70fb-42ac-987a-9e7639f530c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wscr4" Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.645034 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c307593d-70fb-42ac-987a-9e7639f530c6-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wscr4\" (UID: \"c307593d-70fb-42ac-987a-9e7639f530c6\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wscr4" Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.646049 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dpm6r\" (UniqueName: \"kubernetes.io/projected/c307593d-70fb-42ac-987a-9e7639f530c6-kube-api-access-dpm6r\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wscr4\" (UID: \"c307593d-70fb-42ac-987a-9e7639f530c6\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wscr4" Dec 05 11:43:21 crc kubenswrapper[4728]: I1205 11:43:21.727887 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wscr4" Dec 05 11:43:22 crc kubenswrapper[4728]: I1205 11:43:22.324464 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wscr4"] Dec 05 11:43:23 crc kubenswrapper[4728]: I1205 11:43:23.336370 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wscr4" event={"ID":"c307593d-70fb-42ac-987a-9e7639f530c6","Type":"ContainerStarted","Data":"b7bbcf9691feefa05d52bc9aa6ea2cd3ea146d76ac7edbf93575fcac69aa40f0"} Dec 05 11:43:23 crc kubenswrapper[4728]: I1205 11:43:23.336668 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wscr4" event={"ID":"c307593d-70fb-42ac-987a-9e7639f530c6","Type":"ContainerStarted","Data":"ab6afba5adc0113b705397def175efddebf83ba6185ba86315aad422a6f5d6d3"} Dec 05 11:43:25 crc kubenswrapper[4728]: I1205 11:43:25.701977 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 11:43:25 crc kubenswrapper[4728]: I1205 11:43:25.702581 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 11:43:29 crc kubenswrapper[4728]: I1205 11:43:29.094262 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-x4dqn" Dec 05 11:43:29 crc kubenswrapper[4728]: I1205 11:43:29.120333 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wscr4" podStartSLOduration=7.726239923 podStartE2EDuration="8.120312057s" podCreationTimestamp="2025-12-05 11:43:21 +0000 UTC" firstStartedPulling="2025-12-05 11:43:22.33362883 +0000 UTC m=+2136.475751523" lastFinishedPulling="2025-12-05 11:43:22.727700964 +0000 UTC m=+2136.869823657" observedRunningTime="2025-12-05 11:43:23.363347258 +0000 UTC m=+2137.505470041" watchObservedRunningTime="2025-12-05 11:43:29.120312057 +0000 UTC m=+2143.262434760" Dec 05 11:43:29 crc kubenswrapper[4728]: I1205 11:43:29.143141 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-x4dqn" Dec 05 11:43:29 crc kubenswrapper[4728]: I1205 11:43:29.334374 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-x4dqn"] 
Dec 05 11:43:30 crc kubenswrapper[4728]: I1205 11:43:30.408125 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-x4dqn" podUID="9cac5ea8-4806-44d2-bd18-9adabcadf8d1" containerName="registry-server" containerID="cri-o://6f0a408495a65407851cd8d82eba46382935e533ec80b85601a874ab8f2b78a2" gracePeriod=2 Dec 05 11:43:30 crc kubenswrapper[4728]: I1205 11:43:30.898353 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-x4dqn" Dec 05 11:43:31 crc kubenswrapper[4728]: I1205 11:43:31.007026 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9cac5ea8-4806-44d2-bd18-9adabcadf8d1-utilities\") pod \"9cac5ea8-4806-44d2-bd18-9adabcadf8d1\" (UID: \"9cac5ea8-4806-44d2-bd18-9adabcadf8d1\") " Dec 05 11:43:31 crc kubenswrapper[4728]: I1205 11:43:31.007137 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5lkwr\" (UniqueName: \"kubernetes.io/projected/9cac5ea8-4806-44d2-bd18-9adabcadf8d1-kube-api-access-5lkwr\") pod \"9cac5ea8-4806-44d2-bd18-9adabcadf8d1\" (UID: \"9cac5ea8-4806-44d2-bd18-9adabcadf8d1\") " Dec 05 11:43:31 crc kubenswrapper[4728]: I1205 11:43:31.007188 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9cac5ea8-4806-44d2-bd18-9adabcadf8d1-catalog-content\") pod \"9cac5ea8-4806-44d2-bd18-9adabcadf8d1\" (UID: \"9cac5ea8-4806-44d2-bd18-9adabcadf8d1\") " Dec 05 11:43:31 crc kubenswrapper[4728]: I1205 11:43:31.007761 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9cac5ea8-4806-44d2-bd18-9adabcadf8d1-utilities" (OuterVolumeSpecName: "utilities") pod "9cac5ea8-4806-44d2-bd18-9adabcadf8d1" (UID: "9cac5ea8-4806-44d2-bd18-9adabcadf8d1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:43:31 crc kubenswrapper[4728]: I1205 11:43:31.015771 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9cac5ea8-4806-44d2-bd18-9adabcadf8d1-kube-api-access-5lkwr" (OuterVolumeSpecName: "kube-api-access-5lkwr") pod "9cac5ea8-4806-44d2-bd18-9adabcadf8d1" (UID: "9cac5ea8-4806-44d2-bd18-9adabcadf8d1"). InnerVolumeSpecName "kube-api-access-5lkwr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:43:31 crc kubenswrapper[4728]: I1205 11:43:31.109178 4728 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9cac5ea8-4806-44d2-bd18-9adabcadf8d1-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 11:43:31 crc kubenswrapper[4728]: I1205 11:43:31.109207 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5lkwr\" (UniqueName: \"kubernetes.io/projected/9cac5ea8-4806-44d2-bd18-9adabcadf8d1-kube-api-access-5lkwr\") on node \"crc\" DevicePath \"\"" Dec 05 11:43:31 crc kubenswrapper[4728]: I1205 11:43:31.124202 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9cac5ea8-4806-44d2-bd18-9adabcadf8d1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9cac5ea8-4806-44d2-bd18-9adabcadf8d1" (UID: "9cac5ea8-4806-44d2-bd18-9adabcadf8d1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:43:31 crc kubenswrapper[4728]: I1205 11:43:31.211566 4728 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9cac5ea8-4806-44d2-bd18-9adabcadf8d1-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 11:43:31 crc kubenswrapper[4728]: I1205 11:43:31.416230 4728 generic.go:334] "Generic (PLEG): container finished" podID="9cac5ea8-4806-44d2-bd18-9adabcadf8d1" containerID="6f0a408495a65407851cd8d82eba46382935e533ec80b85601a874ab8f2b78a2" exitCode=0 Dec 05 11:43:31 crc kubenswrapper[4728]: I1205 11:43:31.416272 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x4dqn" event={"ID":"9cac5ea8-4806-44d2-bd18-9adabcadf8d1","Type":"ContainerDied","Data":"6f0a408495a65407851cd8d82eba46382935e533ec80b85601a874ab8f2b78a2"} Dec 05 11:43:31 crc kubenswrapper[4728]: I1205 11:43:31.416308 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x4dqn" event={"ID":"9cac5ea8-4806-44d2-bd18-9adabcadf8d1","Type":"ContainerDied","Data":"40c48f42b0ed24bff3e6d42b3a51df2c7f90095c521083c7d02eca13ddf535c6"} Dec 05 11:43:31 crc kubenswrapper[4728]: I1205 11:43:31.416328 4728 scope.go:117] "RemoveContainer" containerID="6f0a408495a65407851cd8d82eba46382935e533ec80b85601a874ab8f2b78a2" Dec 05 11:43:31 crc kubenswrapper[4728]: I1205 11:43:31.417395 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-x4dqn" Dec 05 11:43:31 crc kubenswrapper[4728]: I1205 11:43:31.438499 4728 scope.go:117] "RemoveContainer" containerID="40a72b58a77e06592d647159d8696d8a6681fd45d5ebe263353c1757bf6b5c74" Dec 05 11:43:31 crc kubenswrapper[4728]: I1205 11:43:31.453369 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-x4dqn"] Dec 05 11:43:31 crc kubenswrapper[4728]: I1205 11:43:31.464404 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-x4dqn"] Dec 05 11:43:31 crc kubenswrapper[4728]: I1205 11:43:31.470126 4728 scope.go:117] "RemoveContainer" containerID="8597a31f48df00963be54c30b8af04d1326f6ac3f29e470ba877b89c4262fc31" Dec 05 11:43:31 crc kubenswrapper[4728]: I1205 11:43:31.507663 4728 scope.go:117] "RemoveContainer" containerID="6f0a408495a65407851cd8d82eba46382935e533ec80b85601a874ab8f2b78a2" Dec 05 11:43:31 crc kubenswrapper[4728]: E1205 11:43:31.508183 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6f0a408495a65407851cd8d82eba46382935e533ec80b85601a874ab8f2b78a2\": container with ID starting with 6f0a408495a65407851cd8d82eba46382935e533ec80b85601a874ab8f2b78a2 not found: ID does not exist" containerID="6f0a408495a65407851cd8d82eba46382935e533ec80b85601a874ab8f2b78a2" Dec 05 11:43:31 crc kubenswrapper[4728]: I1205 11:43:31.508221 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6f0a408495a65407851cd8d82eba46382935e533ec80b85601a874ab8f2b78a2"} err="failed to get container status \"6f0a408495a65407851cd8d82eba46382935e533ec80b85601a874ab8f2b78a2\": rpc error: code = NotFound desc = could not find container \"6f0a408495a65407851cd8d82eba46382935e533ec80b85601a874ab8f2b78a2\": container with ID starting with 6f0a408495a65407851cd8d82eba46382935e533ec80b85601a874ab8f2b78a2 not found: ID does not exist" Dec 05 11:43:31 crc 
kubenswrapper[4728]: I1205 11:43:31.508247 4728 scope.go:117] "RemoveContainer" containerID="40a72b58a77e06592d647159d8696d8a6681fd45d5ebe263353c1757bf6b5c74" Dec 05 11:43:31 crc kubenswrapper[4728]: E1205 11:43:31.509330 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"40a72b58a77e06592d647159d8696d8a6681fd45d5ebe263353c1757bf6b5c74\": container with ID starting with 40a72b58a77e06592d647159d8696d8a6681fd45d5ebe263353c1757bf6b5c74 not found: ID does not exist" containerID="40a72b58a77e06592d647159d8696d8a6681fd45d5ebe263353c1757bf6b5c74" Dec 05 11:43:31 crc kubenswrapper[4728]: I1205 11:43:31.509352 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"40a72b58a77e06592d647159d8696d8a6681fd45d5ebe263353c1757bf6b5c74"} err="failed to get container status \"40a72b58a77e06592d647159d8696d8a6681fd45d5ebe263353c1757bf6b5c74\": rpc error: code = NotFound desc = could not find container \"40a72b58a77e06592d647159d8696d8a6681fd45d5ebe263353c1757bf6b5c74\": container with ID starting with 40a72b58a77e06592d647159d8696d8a6681fd45d5ebe263353c1757bf6b5c74 not found: ID does not exist" Dec 05 11:43:31 crc kubenswrapper[4728]: I1205 11:43:31.509365 4728 scope.go:117] "RemoveContainer" containerID="8597a31f48df00963be54c30b8af04d1326f6ac3f29e470ba877b89c4262fc31" Dec 05 11:43:31 crc kubenswrapper[4728]: E1205 11:43:31.509700 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8597a31f48df00963be54c30b8af04d1326f6ac3f29e470ba877b89c4262fc31\": container with ID starting with 8597a31f48df00963be54c30b8af04d1326f6ac3f29e470ba877b89c4262fc31 not found: ID does not exist" containerID="8597a31f48df00963be54c30b8af04d1326f6ac3f29e470ba877b89c4262fc31" Dec 05 11:43:31 crc kubenswrapper[4728]: I1205 11:43:31.509748 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8597a31f48df00963be54c30b8af04d1326f6ac3f29e470ba877b89c4262fc31"} err="failed to get container status \"8597a31f48df00963be54c30b8af04d1326f6ac3f29e470ba877b89c4262fc31\": rpc error: code = NotFound desc = could not find container \"8597a31f48df00963be54c30b8af04d1326f6ac3f29e470ba877b89c4262fc31\": container with ID starting with 8597a31f48df00963be54c30b8af04d1326f6ac3f29e470ba877b89c4262fc31 not found: ID does not exist" Dec 05 11:43:32 crc kubenswrapper[4728]: I1205 11:43:32.361322 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9cac5ea8-4806-44d2-bd18-9adabcadf8d1" path="/var/lib/kubelet/pods/9cac5ea8-4806-44d2-bd18-9adabcadf8d1/volumes" Dec 05 11:43:55 crc kubenswrapper[4728]: I1205 11:43:55.702004 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 11:43:55 crc kubenswrapper[4728]: I1205 11:43:55.702912 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 11:43:55 crc kubenswrapper[4728]: I1205 11:43:55.702989 4728 kubelet.go:2542] "SyncLoop (probe)" 
probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" Dec 05 11:43:55 crc kubenswrapper[4728]: I1205 11:43:55.704258 4728 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d840a321c62d9c3d3f8b0627cf00f77cef26db26853951536c13412819ea8b84"} pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 11:43:55 crc kubenswrapper[4728]: I1205 11:43:55.704395 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" containerID="cri-o://d840a321c62d9c3d3f8b0627cf00f77cef26db26853951536c13412819ea8b84" gracePeriod=600 Dec 05 11:43:56 crc kubenswrapper[4728]: I1205 11:43:56.672782 4728 generic.go:334] "Generic (PLEG): container finished" podID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerID="d840a321c62d9c3d3f8b0627cf00f77cef26db26853951536c13412819ea8b84" exitCode=0 Dec 05 11:43:56 crc kubenswrapper[4728]: I1205 11:43:56.672832 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" event={"ID":"95bfa60b-fcb6-4519-abc5-c25fea50921d","Type":"ContainerDied","Data":"d840a321c62d9c3d3f8b0627cf00f77cef26db26853951536c13412819ea8b84"} Dec 05 11:43:56 crc kubenswrapper[4728]: I1205 11:43:56.673425 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" event={"ID":"95bfa60b-fcb6-4519-abc5-c25fea50921d","Type":"ContainerStarted","Data":"2d07033b290f931a3661668c97d1daf60b1b0020e8132f72469f3cf338c1768e"} Dec 05 11:43:56 crc kubenswrapper[4728]: I1205 11:43:56.673449 4728 scope.go:117] "RemoveContainer" containerID="9ec8fcb9abf0c27ff7684aed90530899b9635c169a8bf34693a24b2f503deb1e" Dec 05 11:44:01 crc kubenswrapper[4728]: I1205 11:44:01.728765 4728 generic.go:334] "Generic (PLEG): container finished" podID="c307593d-70fb-42ac-987a-9e7639f530c6" containerID="b7bbcf9691feefa05d52bc9aa6ea2cd3ea146d76ac7edbf93575fcac69aa40f0" exitCode=0 Dec 05 11:44:01 crc kubenswrapper[4728]: I1205 11:44:01.728878 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wscr4" event={"ID":"c307593d-70fb-42ac-987a-9e7639f530c6","Type":"ContainerDied","Data":"b7bbcf9691feefa05d52bc9aa6ea2cd3ea146d76ac7edbf93575fcac69aa40f0"} Dec 05 11:44:03 crc kubenswrapper[4728]: I1205 11:44:03.156674 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wscr4" Dec 05 11:44:03 crc kubenswrapper[4728]: I1205 11:44:03.210611 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c307593d-70fb-42ac-987a-9e7639f530c6-inventory\") pod \"c307593d-70fb-42ac-987a-9e7639f530c6\" (UID: \"c307593d-70fb-42ac-987a-9e7639f530c6\") " Dec 05 11:44:03 crc kubenswrapper[4728]: I1205 11:44:03.210702 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c307593d-70fb-42ac-987a-9e7639f530c6-libvirt-combined-ca-bundle\") pod \"c307593d-70fb-42ac-987a-9e7639f530c6\" (UID: \"c307593d-70fb-42ac-987a-9e7639f530c6\") " Dec 05 11:44:03 crc kubenswrapper[4728]: I1205 11:44:03.210760 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c307593d-70fb-42ac-987a-9e7639f530c6-ovn-combined-ca-bundle\") pod \"c307593d-70fb-42ac-987a-9e7639f530c6\" (UID: \"c307593d-70fb-42ac-987a-9e7639f530c6\") " Dec 05 11:44:03 crc kubenswrapper[4728]: I1205 11:44:03.210835 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c307593d-70fb-42ac-987a-9e7639f530c6-repo-setup-combined-ca-bundle\") pod \"c307593d-70fb-42ac-987a-9e7639f530c6\" (UID: \"c307593d-70fb-42ac-987a-9e7639f530c6\") " Dec 05 11:44:03 crc kubenswrapper[4728]: I1205 11:44:03.210886 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c307593d-70fb-42ac-987a-9e7639f530c6-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"c307593d-70fb-42ac-987a-9e7639f530c6\" (UID: \"c307593d-70fb-42ac-987a-9e7639f530c6\") " Dec 05 11:44:03 crc kubenswrapper[4728]: I1205 11:44:03.210924 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c307593d-70fb-42ac-987a-9e7639f530c6-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"c307593d-70fb-42ac-987a-9e7639f530c6\" (UID: \"c307593d-70fb-42ac-987a-9e7639f530c6\") " Dec 05 11:44:03 crc kubenswrapper[4728]: I1205 11:44:03.210947 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dpm6r\" (UniqueName: \"kubernetes.io/projected/c307593d-70fb-42ac-987a-9e7639f530c6-kube-api-access-dpm6r\") pod \"c307593d-70fb-42ac-987a-9e7639f530c6\" (UID: \"c307593d-70fb-42ac-987a-9e7639f530c6\") " Dec 05 11:44:03 crc kubenswrapper[4728]: I1205 11:44:03.211020 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c307593d-70fb-42ac-987a-9e7639f530c6-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"c307593d-70fb-42ac-987a-9e7639f530c6\" (UID: \"c307593d-70fb-42ac-987a-9e7639f530c6\") " Dec 05 11:44:03 crc kubenswrapper[4728]: I1205 11:44:03.211053 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c307593d-70fb-42ac-987a-9e7639f530c6-telemetry-combined-ca-bundle\") pod \"c307593d-70fb-42ac-987a-9e7639f530c6\" (UID: 
\"c307593d-70fb-42ac-987a-9e7639f530c6\") " Dec 05 11:44:03 crc kubenswrapper[4728]: I1205 11:44:03.211092 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c307593d-70fb-42ac-987a-9e7639f530c6-openstack-edpm-ipam-ovn-default-certs-0\") pod \"c307593d-70fb-42ac-987a-9e7639f530c6\" (UID: \"c307593d-70fb-42ac-987a-9e7639f530c6\") " Dec 05 11:44:03 crc kubenswrapper[4728]: I1205 11:44:03.211201 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c307593d-70fb-42ac-987a-9e7639f530c6-nova-combined-ca-bundle\") pod \"c307593d-70fb-42ac-987a-9e7639f530c6\" (UID: \"c307593d-70fb-42ac-987a-9e7639f530c6\") " Dec 05 11:44:03 crc kubenswrapper[4728]: I1205 11:44:03.211288 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c307593d-70fb-42ac-987a-9e7639f530c6-neutron-metadata-combined-ca-bundle\") pod \"c307593d-70fb-42ac-987a-9e7639f530c6\" (UID: \"c307593d-70fb-42ac-987a-9e7639f530c6\") " Dec 05 11:44:03 crc kubenswrapper[4728]: I1205 11:44:03.211309 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c307593d-70fb-42ac-987a-9e7639f530c6-bootstrap-combined-ca-bundle\") pod \"c307593d-70fb-42ac-987a-9e7639f530c6\" (UID: \"c307593d-70fb-42ac-987a-9e7639f530c6\") " Dec 05 11:44:03 crc kubenswrapper[4728]: I1205 11:44:03.211338 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c307593d-70fb-42ac-987a-9e7639f530c6-ssh-key\") pod \"c307593d-70fb-42ac-987a-9e7639f530c6\" (UID: \"c307593d-70fb-42ac-987a-9e7639f530c6\") " Dec 05 11:44:03 crc kubenswrapper[4728]: I1205 11:44:03.217718 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c307593d-70fb-42ac-987a-9e7639f530c6-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "c307593d-70fb-42ac-987a-9e7639f530c6" (UID: "c307593d-70fb-42ac-987a-9e7639f530c6"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:44:03 crc kubenswrapper[4728]: I1205 11:44:03.221066 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c307593d-70fb-42ac-987a-9e7639f530c6-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "c307593d-70fb-42ac-987a-9e7639f530c6" (UID: "c307593d-70fb-42ac-987a-9e7639f530c6"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:44:03 crc kubenswrapper[4728]: I1205 11:44:03.221213 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c307593d-70fb-42ac-987a-9e7639f530c6-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "c307593d-70fb-42ac-987a-9e7639f530c6" (UID: "c307593d-70fb-42ac-987a-9e7639f530c6"). InnerVolumeSpecName "telemetry-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:44:03 crc kubenswrapper[4728]: I1205 11:44:03.221878 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c307593d-70fb-42ac-987a-9e7639f530c6-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "c307593d-70fb-42ac-987a-9e7639f530c6" (UID: "c307593d-70fb-42ac-987a-9e7639f530c6"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:44:03 crc kubenswrapper[4728]: I1205 11:44:03.222629 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c307593d-70fb-42ac-987a-9e7639f530c6-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "c307593d-70fb-42ac-987a-9e7639f530c6" (UID: "c307593d-70fb-42ac-987a-9e7639f530c6"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:44:03 crc kubenswrapper[4728]: I1205 11:44:03.222826 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c307593d-70fb-42ac-987a-9e7639f530c6-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "c307593d-70fb-42ac-987a-9e7639f530c6" (UID: "c307593d-70fb-42ac-987a-9e7639f530c6"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:44:03 crc kubenswrapper[4728]: I1205 11:44:03.223869 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c307593d-70fb-42ac-987a-9e7639f530c6-kube-api-access-dpm6r" (OuterVolumeSpecName: "kube-api-access-dpm6r") pod "c307593d-70fb-42ac-987a-9e7639f530c6" (UID: "c307593d-70fb-42ac-987a-9e7639f530c6"). InnerVolumeSpecName "kube-api-access-dpm6r". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:44:03 crc kubenswrapper[4728]: I1205 11:44:03.224417 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c307593d-70fb-42ac-987a-9e7639f530c6-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "c307593d-70fb-42ac-987a-9e7639f530c6" (UID: "c307593d-70fb-42ac-987a-9e7639f530c6"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:44:03 crc kubenswrapper[4728]: I1205 11:44:03.226209 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c307593d-70fb-42ac-987a-9e7639f530c6-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "c307593d-70fb-42ac-987a-9e7639f530c6" (UID: "c307593d-70fb-42ac-987a-9e7639f530c6"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:44:03 crc kubenswrapper[4728]: I1205 11:44:03.226494 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c307593d-70fb-42ac-987a-9e7639f530c6-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "c307593d-70fb-42ac-987a-9e7639f530c6" (UID: "c307593d-70fb-42ac-987a-9e7639f530c6"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:44:03 crc kubenswrapper[4728]: I1205 11:44:03.226960 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c307593d-70fb-42ac-987a-9e7639f530c6-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "c307593d-70fb-42ac-987a-9e7639f530c6" (UID: "c307593d-70fb-42ac-987a-9e7639f530c6"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:44:03 crc kubenswrapper[4728]: I1205 11:44:03.228779 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c307593d-70fb-42ac-987a-9e7639f530c6-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "c307593d-70fb-42ac-987a-9e7639f530c6" (UID: "c307593d-70fb-42ac-987a-9e7639f530c6"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:44:03 crc kubenswrapper[4728]: I1205 11:44:03.250001 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c307593d-70fb-42ac-987a-9e7639f530c6-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c307593d-70fb-42ac-987a-9e7639f530c6" (UID: "c307593d-70fb-42ac-987a-9e7639f530c6"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:44:03 crc kubenswrapper[4728]: I1205 11:44:03.253275 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c307593d-70fb-42ac-987a-9e7639f530c6-inventory" (OuterVolumeSpecName: "inventory") pod "c307593d-70fb-42ac-987a-9e7639f530c6" (UID: "c307593d-70fb-42ac-987a-9e7639f530c6"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:44:03 crc kubenswrapper[4728]: I1205 11:44:03.313495 4728 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c307593d-70fb-42ac-987a-9e7639f530c6-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 11:44:03 crc kubenswrapper[4728]: I1205 11:44:03.313530 4728 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c307593d-70fb-42ac-987a-9e7639f530c6-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:44:03 crc kubenswrapper[4728]: I1205 11:44:03.313547 4728 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c307593d-70fb-42ac-987a-9e7639f530c6-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:44:03 crc kubenswrapper[4728]: I1205 11:44:03.313580 4728 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c307593d-70fb-42ac-987a-9e7639f530c6-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:44:03 crc kubenswrapper[4728]: I1205 11:44:03.313593 4728 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c307593d-70fb-42ac-987a-9e7639f530c6-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 05 11:44:03 crc kubenswrapper[4728]: I1205 11:44:03.313607 4728 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c307593d-70fb-42ac-987a-9e7639f530c6-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 05 11:44:03 crc kubenswrapper[4728]: I1205 11:44:03.313620 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dpm6r\" (UniqueName: \"kubernetes.io/projected/c307593d-70fb-42ac-987a-9e7639f530c6-kube-api-access-dpm6r\") on node \"crc\" DevicePath \"\"" Dec 05 11:44:03 crc kubenswrapper[4728]: I1205 11:44:03.313631 4728 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c307593d-70fb-42ac-987a-9e7639f530c6-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 05 11:44:03 crc kubenswrapper[4728]: I1205 11:44:03.313642 4728 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c307593d-70fb-42ac-987a-9e7639f530c6-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:44:03 crc kubenswrapper[4728]: I1205 11:44:03.313655 4728 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c307593d-70fb-42ac-987a-9e7639f530c6-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 05 11:44:03 crc kubenswrapper[4728]: I1205 11:44:03.313666 4728 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c307593d-70fb-42ac-987a-9e7639f530c6-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:44:03 crc kubenswrapper[4728]: I1205 11:44:03.313678 4728 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/c307593d-70fb-42ac-987a-9e7639f530c6-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:44:03 crc kubenswrapper[4728]: I1205 11:44:03.313690 4728 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c307593d-70fb-42ac-987a-9e7639f530c6-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:44:03 crc kubenswrapper[4728]: I1205 11:44:03.313700 4728 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c307593d-70fb-42ac-987a-9e7639f530c6-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 11:44:03 crc kubenswrapper[4728]: I1205 11:44:03.756648 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wscr4" event={"ID":"c307593d-70fb-42ac-987a-9e7639f530c6","Type":"ContainerDied","Data":"ab6afba5adc0113b705397def175efddebf83ba6185ba86315aad422a6f5d6d3"} Dec 05 11:44:03 crc kubenswrapper[4728]: I1205 11:44:03.756728 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ab6afba5adc0113b705397def175efddebf83ba6185ba86315aad422a6f5d6d3" Dec 05 11:44:03 crc kubenswrapper[4728]: I1205 11:44:03.757122 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wscr4" Dec 05 11:44:03 crc kubenswrapper[4728]: I1205 11:44:03.976917 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-tszdd"] Dec 05 11:44:03 crc kubenswrapper[4728]: E1205 11:44:03.977650 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c307593d-70fb-42ac-987a-9e7639f530c6" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 05 11:44:03 crc kubenswrapper[4728]: I1205 11:44:03.977701 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="c307593d-70fb-42ac-987a-9e7639f530c6" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 05 11:44:03 crc kubenswrapper[4728]: E1205 11:44:03.977751 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cac5ea8-4806-44d2-bd18-9adabcadf8d1" containerName="registry-server" Dec 05 11:44:03 crc kubenswrapper[4728]: I1205 11:44:03.977767 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cac5ea8-4806-44d2-bd18-9adabcadf8d1" containerName="registry-server" Dec 05 11:44:03 crc kubenswrapper[4728]: E1205 11:44:03.977858 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cac5ea8-4806-44d2-bd18-9adabcadf8d1" containerName="extract-content" Dec 05 11:44:03 crc kubenswrapper[4728]: I1205 11:44:03.977873 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cac5ea8-4806-44d2-bd18-9adabcadf8d1" containerName="extract-content" Dec 05 11:44:03 crc kubenswrapper[4728]: E1205 11:44:03.977913 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cac5ea8-4806-44d2-bd18-9adabcadf8d1" containerName="extract-utilities" Dec 05 11:44:03 crc kubenswrapper[4728]: I1205 11:44:03.977930 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cac5ea8-4806-44d2-bd18-9adabcadf8d1" containerName="extract-utilities" Dec 05 11:44:03 crc kubenswrapper[4728]: I1205 11:44:03.978378 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="c307593d-70fb-42ac-987a-9e7639f530c6" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 05 11:44:03 crc 
kubenswrapper[4728]: I1205 11:44:03.978415 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="9cac5ea8-4806-44d2-bd18-9adabcadf8d1" containerName="registry-server"
Dec 05 11:44:03 crc kubenswrapper[4728]: I1205 11:44:03.979730 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-tszdd"
Dec 05 11:44:03 crc kubenswrapper[4728]: I1205 11:44:03.982569 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config"
Dec 05 11:44:03 crc kubenswrapper[4728]: I1205 11:44:03.983777 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 05 11:44:03 crc kubenswrapper[4728]: I1205 11:44:03.984159 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 05 11:44:03 crc kubenswrapper[4728]: I1205 11:44:03.986471 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b4kzh"
Dec 05 11:44:03 crc kubenswrapper[4728]: I1205 11:44:03.987114 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 05 11:44:03 crc kubenswrapper[4728]: I1205 11:44:03.990757 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-tszdd"]
Dec 05 11:44:04 crc kubenswrapper[4728]: I1205 11:44:04.030868 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e1b21c39-5973-43f9-a5f5-73f7e3a1f778-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-tszdd\" (UID: \"e1b21c39-5973-43f9-a5f5-73f7e3a1f778\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-tszdd"
Dec 05 11:44:04 crc kubenswrapper[4728]: I1205 11:44:04.030964 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1b21c39-5973-43f9-a5f5-73f7e3a1f778-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-tszdd\" (UID: \"e1b21c39-5973-43f9-a5f5-73f7e3a1f778\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-tszdd"
Dec 05 11:44:04 crc kubenswrapper[4728]: I1205 11:44:04.031129 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/e1b21c39-5973-43f9-a5f5-73f7e3a1f778-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-tszdd\" (UID: \"e1b21c39-5973-43f9-a5f5-73f7e3a1f778\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-tszdd"
Dec 05 11:44:04 crc kubenswrapper[4728]: I1205 11:44:04.031219 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e1b21c39-5973-43f9-a5f5-73f7e3a1f778-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-tszdd\" (UID: \"e1b21c39-5973-43f9-a5f5-73f7e3a1f778\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-tszdd"
Dec 05 11:44:04 crc kubenswrapper[4728]: I1205 11:44:04.031330 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r9p7g\" (UniqueName: \"kubernetes.io/projected/e1b21c39-5973-43f9-a5f5-73f7e3a1f778-kube-api-access-r9p7g\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-tszdd\" (UID: \"e1b21c39-5973-43f9-a5f5-73f7e3a1f778\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-tszdd"
Dec 05 11:44:04 crc kubenswrapper[4728]: I1205 11:44:04.132874 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/e1b21c39-5973-43f9-a5f5-73f7e3a1f778-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-tszdd\" (UID: \"e1b21c39-5973-43f9-a5f5-73f7e3a1f778\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-tszdd"
Dec 05 11:44:04 crc kubenswrapper[4728]: I1205 11:44:04.132986 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e1b21c39-5973-43f9-a5f5-73f7e3a1f778-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-tszdd\" (UID: \"e1b21c39-5973-43f9-a5f5-73f7e3a1f778\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-tszdd"
Dec 05 11:44:04 crc kubenswrapper[4728]: I1205 11:44:04.133054 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r9p7g\" (UniqueName: \"kubernetes.io/projected/e1b21c39-5973-43f9-a5f5-73f7e3a1f778-kube-api-access-r9p7g\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-tszdd\" (UID: \"e1b21c39-5973-43f9-a5f5-73f7e3a1f778\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-tszdd"
Dec 05 11:44:04 crc kubenswrapper[4728]: I1205 11:44:04.133137 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e1b21c39-5973-43f9-a5f5-73f7e3a1f778-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-tszdd\" (UID: \"e1b21c39-5973-43f9-a5f5-73f7e3a1f778\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-tszdd"
Dec 05 11:44:04 crc kubenswrapper[4728]: I1205 11:44:04.133166 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1b21c39-5973-43f9-a5f5-73f7e3a1f778-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-tszdd\" (UID: \"e1b21c39-5973-43f9-a5f5-73f7e3a1f778\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-tszdd"
Dec 05 11:44:04 crc kubenswrapper[4728]: I1205 11:44:04.135068 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/e1b21c39-5973-43f9-a5f5-73f7e3a1f778-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-tszdd\" (UID: \"e1b21c39-5973-43f9-a5f5-73f7e3a1f778\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-tszdd"
Dec 05 11:44:04 crc kubenswrapper[4728]: I1205 11:44:04.139397 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e1b21c39-5973-43f9-a5f5-73f7e3a1f778-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-tszdd\" (UID: \"e1b21c39-5973-43f9-a5f5-73f7e3a1f778\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-tszdd"
Dec 05 11:44:04 crc kubenswrapper[4728]: I1205 11:44:04.139409 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e1b21c39-5973-43f9-a5f5-73f7e3a1f778-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-tszdd\" (UID: \"e1b21c39-5973-43f9-a5f5-73f7e3a1f778\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-tszdd"
Dec 05 11:44:04 crc kubenswrapper[4728]: I1205 11:44:04.139443 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1b21c39-5973-43f9-a5f5-73f7e3a1f778-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-tszdd\" (UID: \"e1b21c39-5973-43f9-a5f5-73f7e3a1f778\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-tszdd"
Dec 05 11:44:04 crc kubenswrapper[4728]: I1205 11:44:04.154219 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r9p7g\" (UniqueName: \"kubernetes.io/projected/e1b21c39-5973-43f9-a5f5-73f7e3a1f778-kube-api-access-r9p7g\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-tszdd\" (UID: \"e1b21c39-5973-43f9-a5f5-73f7e3a1f778\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-tszdd"
Dec 05 11:44:04 crc kubenswrapper[4728]: I1205 11:44:04.305899 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-tszdd"
Dec 05 11:44:04 crc kubenswrapper[4728]: I1205 11:44:04.861981 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-tszdd"]
Dec 05 11:44:05 crc kubenswrapper[4728]: I1205 11:44:05.781621 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-tszdd" event={"ID":"e1b21c39-5973-43f9-a5f5-73f7e3a1f778","Type":"ContainerStarted","Data":"daebb25772b14648459ea306a7ab424ff76bbd079b6086a2fa489b84d111bdd1"}
Dec 05 11:44:05 crc kubenswrapper[4728]: I1205 11:44:05.782641 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-tszdd" event={"ID":"e1b21c39-5973-43f9-a5f5-73f7e3a1f778","Type":"ContainerStarted","Data":"06b48d92bcf56adad82cd6d3eed45c3a87dc3714487da73dbee1360597b35f91"}
Dec 05 11:44:05 crc kubenswrapper[4728]: I1205 11:44:05.816635 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-tszdd" podStartSLOduration=2.338545656 podStartE2EDuration="2.816607346s" podCreationTimestamp="2025-12-05 11:44:03 +0000 UTC" firstStartedPulling="2025-12-05 11:44:04.864974432 +0000 UTC m=+2179.007097125" lastFinishedPulling="2025-12-05 11:44:05.343036112 +0000 UTC m=+2179.485158815" observedRunningTime="2025-12-05 11:44:05.802039761 +0000 UTC m=+2179.944162464" watchObservedRunningTime="2025-12-05 11:44:05.816607346 +0000 UTC m=+2179.958730079"
Dec 05 11:45:00 crc kubenswrapper[4728]: I1205 11:45:00.152743 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415585-rplrh"]
Dec 05 11:45:00 crc kubenswrapper[4728]: I1205 11:45:00.157265 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415585-rplrh"
Dec 05 11:45:00 crc kubenswrapper[4728]: I1205 11:45:00.165517 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Dec 05 11:45:00 crc kubenswrapper[4728]: I1205 11:45:00.165719 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Dec 05 11:45:00 crc kubenswrapper[4728]: I1205 11:45:00.171012 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415585-rplrh"]
Dec 05 11:45:00 crc kubenswrapper[4728]: I1205 11:45:00.294430 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vjvjm\" (UniqueName: \"kubernetes.io/projected/91819e1a-92e1-4893-b375-90264108905d-kube-api-access-vjvjm\") pod \"collect-profiles-29415585-rplrh\" (UID: \"91819e1a-92e1-4893-b375-90264108905d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415585-rplrh"
Dec 05 11:45:00 crc kubenswrapper[4728]: I1205 11:45:00.294540 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/91819e1a-92e1-4893-b375-90264108905d-config-volume\") pod \"collect-profiles-29415585-rplrh\" (UID: \"91819e1a-92e1-4893-b375-90264108905d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415585-rplrh"
Dec 05 11:45:00 crc kubenswrapper[4728]: I1205 11:45:00.294565 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/91819e1a-92e1-4893-b375-90264108905d-secret-volume\") pod \"collect-profiles-29415585-rplrh\" (UID: \"91819e1a-92e1-4893-b375-90264108905d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415585-rplrh"
Dec 05 11:45:00 crc kubenswrapper[4728]: I1205 11:45:00.396882 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vjvjm\" (UniqueName: \"kubernetes.io/projected/91819e1a-92e1-4893-b375-90264108905d-kube-api-access-vjvjm\") pod \"collect-profiles-29415585-rplrh\" (UID: \"91819e1a-92e1-4893-b375-90264108905d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415585-rplrh"
Dec 05 11:45:00 crc kubenswrapper[4728]: I1205 11:45:00.397044 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/91819e1a-92e1-4893-b375-90264108905d-config-volume\") pod \"collect-profiles-29415585-rplrh\" (UID: \"91819e1a-92e1-4893-b375-90264108905d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415585-rplrh"
Dec 05 11:45:00 crc kubenswrapper[4728]: I1205 11:45:00.397077 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/91819e1a-92e1-4893-b375-90264108905d-secret-volume\") pod \"collect-profiles-29415585-rplrh\" (UID: \"91819e1a-92e1-4893-b375-90264108905d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415585-rplrh"
Dec 05 11:45:00 crc kubenswrapper[4728]: I1205 11:45:00.398184 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/91819e1a-92e1-4893-b375-90264108905d-config-volume\") pod \"collect-profiles-29415585-rplrh\" (UID: \"91819e1a-92e1-4893-b375-90264108905d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415585-rplrh"
Dec 05 11:45:00 crc kubenswrapper[4728]: I1205 11:45:00.405683 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/91819e1a-92e1-4893-b375-90264108905d-secret-volume\") pod \"collect-profiles-29415585-rplrh\" (UID: \"91819e1a-92e1-4893-b375-90264108905d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415585-rplrh"
Dec 05 11:45:00 crc kubenswrapper[4728]: I1205 11:45:00.419164 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vjvjm\" (UniqueName: \"kubernetes.io/projected/91819e1a-92e1-4893-b375-90264108905d-kube-api-access-vjvjm\") pod \"collect-profiles-29415585-rplrh\" (UID: \"91819e1a-92e1-4893-b375-90264108905d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415585-rplrh"
Dec 05 11:45:00 crc kubenswrapper[4728]: I1205 11:45:00.477107 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415585-rplrh"
Dec 05 11:45:00 crc kubenswrapper[4728]: I1205 11:45:00.964169 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415585-rplrh"]
Dec 05 11:45:01 crc kubenswrapper[4728]: I1205 11:45:01.329694 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415585-rplrh" event={"ID":"91819e1a-92e1-4893-b375-90264108905d","Type":"ContainerStarted","Data":"c312adeed26f1b7a5ea1007299ad9183cbd000672b0d1db5f804d99a0ef7b49c"}
Dec 05 11:45:01 crc kubenswrapper[4728]: I1205 11:45:01.329984 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415585-rplrh" event={"ID":"91819e1a-92e1-4893-b375-90264108905d","Type":"ContainerStarted","Data":"502480260aa02e73d9721cdd6c5edf4a1de424818549ebd04a0db89fdafe63f6"}
Dec 05 11:45:01 crc kubenswrapper[4728]: I1205 11:45:01.346652 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29415585-rplrh" podStartSLOduration=1.346634794 podStartE2EDuration="1.346634794s" podCreationTimestamp="2025-12-05 11:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:45:01.342011285 +0000 UTC m=+2235.484133988" watchObservedRunningTime="2025-12-05 11:45:01.346634794 +0000 UTC m=+2235.488757487"
Dec 05 11:45:02 crc kubenswrapper[4728]: I1205 11:45:02.338895 4728 generic.go:334] "Generic (PLEG): container finished" podID="91819e1a-92e1-4893-b375-90264108905d" containerID="c312adeed26f1b7a5ea1007299ad9183cbd000672b0d1db5f804d99a0ef7b49c" exitCode=0
Dec 05 11:45:02 crc kubenswrapper[4728]: I1205 11:45:02.338949 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415585-rplrh" event={"ID":"91819e1a-92e1-4893-b375-90264108905d","Type":"ContainerDied","Data":"c312adeed26f1b7a5ea1007299ad9183cbd000672b0d1db5f804d99a0ef7b49c"}
Dec 05 11:45:03 crc kubenswrapper[4728]: I1205 11:45:03.673030 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415585-rplrh"
Dec 05 11:45:03 crc kubenswrapper[4728]: I1205 11:45:03.766010 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/91819e1a-92e1-4893-b375-90264108905d-config-volume\") pod \"91819e1a-92e1-4893-b375-90264108905d\" (UID: \"91819e1a-92e1-4893-b375-90264108905d\") "
Dec 05 11:45:03 crc kubenswrapper[4728]: I1205 11:45:03.766172 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/91819e1a-92e1-4893-b375-90264108905d-secret-volume\") pod \"91819e1a-92e1-4893-b375-90264108905d\" (UID: \"91819e1a-92e1-4893-b375-90264108905d\") "
Dec 05 11:45:03 crc kubenswrapper[4728]: I1205 11:45:03.766224 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vjvjm\" (UniqueName: \"kubernetes.io/projected/91819e1a-92e1-4893-b375-90264108905d-kube-api-access-vjvjm\") pod \"91819e1a-92e1-4893-b375-90264108905d\" (UID: \"91819e1a-92e1-4893-b375-90264108905d\") "
Dec 05 11:45:03 crc kubenswrapper[4728]: I1205 11:45:03.766999 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/91819e1a-92e1-4893-b375-90264108905d-config-volume" (OuterVolumeSpecName: "config-volume") pod "91819e1a-92e1-4893-b375-90264108905d" (UID: "91819e1a-92e1-4893-b375-90264108905d"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:45:03 crc kubenswrapper[4728]: I1205 11:45:03.772452 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/91819e1a-92e1-4893-b375-90264108905d-kube-api-access-vjvjm" (OuterVolumeSpecName: "kube-api-access-vjvjm") pod "91819e1a-92e1-4893-b375-90264108905d" (UID: "91819e1a-92e1-4893-b375-90264108905d"). InnerVolumeSpecName "kube-api-access-vjvjm". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:45:03 crc kubenswrapper[4728]: I1205 11:45:03.773081 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91819e1a-92e1-4893-b375-90264108905d-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "91819e1a-92e1-4893-b375-90264108905d" (UID: "91819e1a-92e1-4893-b375-90264108905d"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:45:03 crc kubenswrapper[4728]: I1205 11:45:03.868989 4728 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/91819e1a-92e1-4893-b375-90264108905d-config-volume\") on node \"crc\" DevicePath \"\""
Dec 05 11:45:03 crc kubenswrapper[4728]: I1205 11:45:03.869026 4728 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/91819e1a-92e1-4893-b375-90264108905d-secret-volume\") on node \"crc\" DevicePath \"\""
Dec 05 11:45:03 crc kubenswrapper[4728]: I1205 11:45:03.869038 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vjvjm\" (UniqueName: \"kubernetes.io/projected/91819e1a-92e1-4893-b375-90264108905d-kube-api-access-vjvjm\") on node \"crc\" DevicePath \"\""
Dec 05 11:45:04 crc kubenswrapper[4728]: I1205 11:45:04.362702 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415585-rplrh"
Dec 05 11:45:04 crc kubenswrapper[4728]: I1205 11:45:04.367254 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415585-rplrh" event={"ID":"91819e1a-92e1-4893-b375-90264108905d","Type":"ContainerDied","Data":"502480260aa02e73d9721cdd6c5edf4a1de424818549ebd04a0db89fdafe63f6"}
Dec 05 11:45:04 crc kubenswrapper[4728]: I1205 11:45:04.367809 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="502480260aa02e73d9721cdd6c5edf4a1de424818549ebd04a0db89fdafe63f6"
Dec 05 11:45:04 crc kubenswrapper[4728]: I1205 11:45:04.424385 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415540-x94v5"]
Dec 05 11:45:04 crc kubenswrapper[4728]: I1205 11:45:04.435834 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415540-x94v5"]
Dec 05 11:45:06 crc kubenswrapper[4728]: I1205 11:45:06.372586 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="86cfa1e7-7206-404d-bc2d-bb34f50980ef" path="/var/lib/kubelet/pods/86cfa1e7-7206-404d-bc2d-bb34f50980ef/volumes"
Dec 05 11:45:09 crc kubenswrapper[4728]: I1205 11:45:09.415975 4728 generic.go:334] "Generic (PLEG): container finished" podID="e1b21c39-5973-43f9-a5f5-73f7e3a1f778" containerID="daebb25772b14648459ea306a7ab424ff76bbd079b6086a2fa489b84d111bdd1" exitCode=0
Dec 05 11:45:09 crc kubenswrapper[4728]: I1205 11:45:09.416072 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-tszdd" event={"ID":"e1b21c39-5973-43f9-a5f5-73f7e3a1f778","Type":"ContainerDied","Data":"daebb25772b14648459ea306a7ab424ff76bbd079b6086a2fa489b84d111bdd1"}
Dec 05 11:45:10 crc kubenswrapper[4728]: I1205 11:45:10.950108 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-tszdd"
Dec 05 11:45:11 crc kubenswrapper[4728]: I1205 11:45:11.080661 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e1b21c39-5973-43f9-a5f5-73f7e3a1f778-ssh-key\") pod \"e1b21c39-5973-43f9-a5f5-73f7e3a1f778\" (UID: \"e1b21c39-5973-43f9-a5f5-73f7e3a1f778\") "
Dec 05 11:45:11 crc kubenswrapper[4728]: I1205 11:45:11.080782 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e1b21c39-5973-43f9-a5f5-73f7e3a1f778-inventory\") pod \"e1b21c39-5973-43f9-a5f5-73f7e3a1f778\" (UID: \"e1b21c39-5973-43f9-a5f5-73f7e3a1f778\") "
Dec 05 11:45:11 crc kubenswrapper[4728]: I1205 11:45:11.080997 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/e1b21c39-5973-43f9-a5f5-73f7e3a1f778-ovncontroller-config-0\") pod \"e1b21c39-5973-43f9-a5f5-73f7e3a1f778\" (UID: \"e1b21c39-5973-43f9-a5f5-73f7e3a1f778\") "
Dec 05 11:45:11 crc kubenswrapper[4728]: I1205 11:45:11.081054 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1b21c39-5973-43f9-a5f5-73f7e3a1f778-ovn-combined-ca-bundle\") pod \"e1b21c39-5973-43f9-a5f5-73f7e3a1f778\" (UID: \"e1b21c39-5973-43f9-a5f5-73f7e3a1f778\") "
Dec 05 11:45:11 crc kubenswrapper[4728]: I1205 11:45:11.081295 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r9p7g\" (UniqueName: \"kubernetes.io/projected/e1b21c39-5973-43f9-a5f5-73f7e3a1f778-kube-api-access-r9p7g\") pod \"e1b21c39-5973-43f9-a5f5-73f7e3a1f778\" (UID: \"e1b21c39-5973-43f9-a5f5-73f7e3a1f778\") "
Dec 05 11:45:11 crc kubenswrapper[4728]: I1205 11:45:11.086260 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1b21c39-5973-43f9-a5f5-73f7e3a1f778-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "e1b21c39-5973-43f9-a5f5-73f7e3a1f778" (UID: "e1b21c39-5973-43f9-a5f5-73f7e3a1f778"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:45:11 crc kubenswrapper[4728]: I1205 11:45:11.088054 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1b21c39-5973-43f9-a5f5-73f7e3a1f778-kube-api-access-r9p7g" (OuterVolumeSpecName: "kube-api-access-r9p7g") pod "e1b21c39-5973-43f9-a5f5-73f7e3a1f778" (UID: "e1b21c39-5973-43f9-a5f5-73f7e3a1f778"). InnerVolumeSpecName "kube-api-access-r9p7g". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:45:11 crc kubenswrapper[4728]: I1205 11:45:11.115658 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1b21c39-5973-43f9-a5f5-73f7e3a1f778-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "e1b21c39-5973-43f9-a5f5-73f7e3a1f778" (UID: "e1b21c39-5973-43f9-a5f5-73f7e3a1f778"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:45:11 crc kubenswrapper[4728]: I1205 11:45:11.119625 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1b21c39-5973-43f9-a5f5-73f7e3a1f778-inventory" (OuterVolumeSpecName: "inventory") pod "e1b21c39-5973-43f9-a5f5-73f7e3a1f778" (UID: "e1b21c39-5973-43f9-a5f5-73f7e3a1f778"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:45:11 crc kubenswrapper[4728]: I1205 11:45:11.122665 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e1b21c39-5973-43f9-a5f5-73f7e3a1f778-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "e1b21c39-5973-43f9-a5f5-73f7e3a1f778" (UID: "e1b21c39-5973-43f9-a5f5-73f7e3a1f778"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:45:11 crc kubenswrapper[4728]: I1205 11:45:11.184068 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r9p7g\" (UniqueName: \"kubernetes.io/projected/e1b21c39-5973-43f9-a5f5-73f7e3a1f778-kube-api-access-r9p7g\") on node \"crc\" DevicePath \"\""
Dec 05 11:45:11 crc kubenswrapper[4728]: I1205 11:45:11.184115 4728 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e1b21c39-5973-43f9-a5f5-73f7e3a1f778-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 05 11:45:11 crc kubenswrapper[4728]: I1205 11:45:11.184133 4728 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e1b21c39-5973-43f9-a5f5-73f7e3a1f778-inventory\") on node \"crc\" DevicePath \"\""
Dec 05 11:45:11 crc kubenswrapper[4728]: I1205 11:45:11.184150 4728 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/e1b21c39-5973-43f9-a5f5-73f7e3a1f778-ovncontroller-config-0\") on node \"crc\" DevicePath \"\""
Dec 05 11:45:11 crc kubenswrapper[4728]: I1205 11:45:11.184169 4728 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1b21c39-5973-43f9-a5f5-73f7e3a1f778-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 11:45:11 crc kubenswrapper[4728]: I1205 11:45:11.444126 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-tszdd" event={"ID":"e1b21c39-5973-43f9-a5f5-73f7e3a1f778","Type":"ContainerDied","Data":"06b48d92bcf56adad82cd6d3eed45c3a87dc3714487da73dbee1360597b35f91"}
Dec 05 11:45:11 crc kubenswrapper[4728]: I1205 11:45:11.444178 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="06b48d92bcf56adad82cd6d3eed45c3a87dc3714487da73dbee1360597b35f91"
Dec 05 11:45:11 crc kubenswrapper[4728]: I1205 11:45:11.444295 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-tszdd"
Dec 05 11:45:11 crc kubenswrapper[4728]: I1205 11:45:11.543438 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-clmjj"]
Dec 05 11:45:11 crc kubenswrapper[4728]: E1205 11:45:11.543865 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91819e1a-92e1-4893-b375-90264108905d" containerName="collect-profiles"
Dec 05 11:45:11 crc kubenswrapper[4728]: I1205 11:45:11.543885 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="91819e1a-92e1-4893-b375-90264108905d" containerName="collect-profiles"
Dec 05 11:45:11 crc kubenswrapper[4728]: E1205 11:45:11.543939 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1b21c39-5973-43f9-a5f5-73f7e3a1f778" containerName="ovn-edpm-deployment-openstack-edpm-ipam"
Dec 05 11:45:11 crc kubenswrapper[4728]: I1205 11:45:11.543949 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1b21c39-5973-43f9-a5f5-73f7e3a1f778" containerName="ovn-edpm-deployment-openstack-edpm-ipam"
Dec 05 11:45:11 crc kubenswrapper[4728]: I1205 11:45:11.544175 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="91819e1a-92e1-4893-b375-90264108905d" containerName="collect-profiles"
Dec 05 11:45:11 crc kubenswrapper[4728]: I1205 11:45:11.544216 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1b21c39-5973-43f9-a5f5-73f7e3a1f778" containerName="ovn-edpm-deployment-openstack-edpm-ipam"
Dec 05 11:45:11 crc kubenswrapper[4728]: I1205 11:45:11.544950 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-clmjj"
Dec 05 11:45:11 crc kubenswrapper[4728]: I1205 11:45:11.546585 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config"
Dec 05 11:45:11 crc kubenswrapper[4728]: I1205 11:45:11.547460 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 05 11:45:11 crc kubenswrapper[4728]: I1205 11:45:11.549443 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 05 11:45:11 crc kubenswrapper[4728]: I1205 11:45:11.549701 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config"
Dec 05 11:45:11 crc kubenswrapper[4728]: I1205 11:45:11.555264 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 05 11:45:11 crc kubenswrapper[4728]: I1205 11:45:11.555485 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b4kzh"
Dec 05 11:45:11 crc kubenswrapper[4728]: I1205 11:45:11.559985 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-clmjj"]
Dec 05 11:45:11 crc kubenswrapper[4728]: I1205 11:45:11.692718 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-928t6\" (UniqueName: \"kubernetes.io/projected/60e7995e-9ae7-47b3-bd6a-991c444af447-kube-api-access-928t6\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-clmjj\" (UID: \"60e7995e-9ae7-47b3-bd6a-991c444af447\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-clmjj"
Dec 05 11:45:11 crc kubenswrapper[4728]: I1205 11:45:11.692869 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/60e7995e-9ae7-47b3-bd6a-991c444af447-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-clmjj\" (UID: \"60e7995e-9ae7-47b3-bd6a-991c444af447\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-clmjj"
Dec 05 11:45:11 crc kubenswrapper[4728]: I1205 11:45:11.692947 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/60e7995e-9ae7-47b3-bd6a-991c444af447-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-clmjj\" (UID: \"60e7995e-9ae7-47b3-bd6a-991c444af447\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-clmjj"
Dec 05 11:45:11 crc kubenswrapper[4728]: I1205 11:45:11.692992 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60e7995e-9ae7-47b3-bd6a-991c444af447-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-clmjj\" (UID: \"60e7995e-9ae7-47b3-bd6a-991c444af447\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-clmjj"
Dec 05 11:45:11 crc kubenswrapper[4728]: I1205 11:45:11.693030 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/60e7995e-9ae7-47b3-bd6a-991c444af447-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-clmjj\" (UID: \"60e7995e-9ae7-47b3-bd6a-991c444af447\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-clmjj"
Dec 05 11:45:11 crc kubenswrapper[4728]: I1205 11:45:11.693301 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/60e7995e-9ae7-47b3-bd6a-991c444af447-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-clmjj\" (UID: \"60e7995e-9ae7-47b3-bd6a-991c444af447\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-clmjj"
Dec 05 11:45:11 crc kubenswrapper[4728]: I1205 11:45:11.796057 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/60e7995e-9ae7-47b3-bd6a-991c444af447-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-clmjj\" (UID: \"60e7995e-9ae7-47b3-bd6a-991c444af447\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-clmjj"
Dec 05 11:45:11 crc kubenswrapper[4728]: I1205 11:45:11.796389 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-928t6\" (UniqueName: \"kubernetes.io/projected/60e7995e-9ae7-47b3-bd6a-991c444af447-kube-api-access-928t6\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-clmjj\" (UID: \"60e7995e-9ae7-47b3-bd6a-991c444af447\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-clmjj"
Dec 05 11:45:11 crc kubenswrapper[4728]: I1205 11:45:11.796677 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/60e7995e-9ae7-47b3-bd6a-991c444af447-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-clmjj\" (UID: \"60e7995e-9ae7-47b3-bd6a-991c444af447\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-clmjj"
Dec 05 11:45:11 crc kubenswrapper[4728]: I1205 11:45:11.796967 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/60e7995e-9ae7-47b3-bd6a-991c444af447-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-clmjj\" (UID: \"60e7995e-9ae7-47b3-bd6a-991c444af447\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-clmjj"
Dec 05 11:45:11 crc kubenswrapper[4728]: I1205 11:45:11.797057 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60e7995e-9ae7-47b3-bd6a-991c444af447-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-clmjj\" (UID: \"60e7995e-9ae7-47b3-bd6a-991c444af447\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-clmjj"
Dec 05 11:45:11 crc kubenswrapper[4728]: I1205 11:45:11.797129 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/60e7995e-9ae7-47b3-bd6a-991c444af447-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-clmjj\" (UID: \"60e7995e-9ae7-47b3-bd6a-991c444af447\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-clmjj"
Dec 05 11:45:11 crc kubenswrapper[4728]: I1205 11:45:11.803557 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/60e7995e-9ae7-47b3-bd6a-991c444af447-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-clmjj\" (UID: \"60e7995e-9ae7-47b3-bd6a-991c444af447\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-clmjj"
Dec 05 11:45:11 crc kubenswrapper[4728]: I1205 11:45:11.803635 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/60e7995e-9ae7-47b3-bd6a-991c444af447-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-clmjj\" (UID: \"60e7995e-9ae7-47b3-bd6a-991c444af447\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-clmjj"
Dec 05 11:45:11 crc kubenswrapper[4728]: I1205 11:45:11.803703 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/60e7995e-9ae7-47b3-bd6a-991c444af447-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-clmjj\" (UID: \"60e7995e-9ae7-47b3-bd6a-991c444af447\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-clmjj"
Dec 05 11:45:11 crc kubenswrapper[4728]: I1205 11:45:11.804272 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60e7995e-9ae7-47b3-bd6a-991c444af447-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-clmjj\" (UID: \"60e7995e-9ae7-47b3-bd6a-991c444af447\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-clmjj"
Dec 05 11:45:11 crc kubenswrapper[4728]: I1205 11:45:11.805705 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/60e7995e-9ae7-47b3-bd6a-991c444af447-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-clmjj\" (UID: \"60e7995e-9ae7-47b3-bd6a-991c444af447\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-clmjj"
Dec 05 11:45:11 crc kubenswrapper[4728]: I1205 11:45:11.825572 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-928t6\" (UniqueName: \"kubernetes.io/projected/60e7995e-9ae7-47b3-bd6a-991c444af447-kube-api-access-928t6\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-clmjj\" (UID: \"60e7995e-9ae7-47b3-bd6a-991c444af447\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-clmjj"
Dec 05 11:45:11 crc kubenswrapper[4728]: I1205 11:45:11.880835 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-clmjj"
Dec 05 11:45:12 crc kubenswrapper[4728]: I1205 11:45:12.443345 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-clmjj"]
Dec 05 11:45:12 crc kubenswrapper[4728]: W1205 11:45:12.447059 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod60e7995e_9ae7_47b3_bd6a_991c444af447.slice/crio-ce1bcf9b2f4bb7a1c4498a69f277be2d68cfbb3166571777afed6cc19fec3aff WatchSource:0}: Error finding container ce1bcf9b2f4bb7a1c4498a69f277be2d68cfbb3166571777afed6cc19fec3aff: Status 404 returned error can't find the container with id ce1bcf9b2f4bb7a1c4498a69f277be2d68cfbb3166571777afed6cc19fec3aff
Dec 05 11:45:13 crc kubenswrapper[4728]: I1205 11:45:13.463246 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-clmjj" event={"ID":"60e7995e-9ae7-47b3-bd6a-991c444af447","Type":"ContainerStarted","Data":"88b648ce67bc5e38c82e4f947ab1d0863d40375fcab7cda8e63b4454fde67876"}
Dec 05 11:45:13 crc kubenswrapper[4728]: I1205 11:45:13.463589 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-clmjj" event={"ID":"60e7995e-9ae7-47b3-bd6a-991c444af447","Type":"ContainerStarted","Data":"ce1bcf9b2f4bb7a1c4498a69f277be2d68cfbb3166571777afed6cc19fec3aff"}
Dec 05 11:45:13 crc kubenswrapper[4728]: I1205 11:45:13.489117 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-clmjj" podStartSLOduration=1.969644317 podStartE2EDuration="2.489099274s" podCreationTimestamp="2025-12-05 11:45:11 +0000 UTC" firstStartedPulling="2025-12-05 11:45:12.449654555 +0000 UTC m=+2246.591777258" lastFinishedPulling="2025-12-05 11:45:12.969109512 +0000 UTC m=+2247.111232215" observedRunningTime="2025-12-05 11:45:13.477150406 +0000 UTC m=+2247.619273099" watchObservedRunningTime="2025-12-05 11:45:13.489099274 +0000 UTC m=+2247.631221967"
Dec 05 11:46:00 crc kubenswrapper[4728]: I1205 11:46:00.894524 4728 scope.go:117] "RemoveContainer" containerID="4484ac0fae5f3a78b23028149b3ccdf7568a08a491dadd0a87640ab2fcfe956a"
Dec 05 11:46:02 crc kubenswrapper[4728]: I1205 11:46:02.040692 4728 generic.go:334] "Generic (PLEG): container finished" podID="60e7995e-9ae7-47b3-bd6a-991c444af447" containerID="88b648ce67bc5e38c82e4f947ab1d0863d40375fcab7cda8e63b4454fde67876" exitCode=0
Dec 05 11:46:02 crc kubenswrapper[4728]: I1205 11:46:02.040743 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-clmjj" event={"ID":"60e7995e-9ae7-47b3-bd6a-991c444af447","Type":"ContainerDied","Data":"88b648ce67bc5e38c82e4f947ab1d0863d40375fcab7cda8e63b4454fde67876"}
Dec 05 11:46:03 crc kubenswrapper[4728]: I1205 11:46:03.616751 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-clmjj"
Dec 05 11:46:03 crc kubenswrapper[4728]: I1205 11:46:03.709645 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/60e7995e-9ae7-47b3-bd6a-991c444af447-neutron-ovn-metadata-agent-neutron-config-0\") pod \"60e7995e-9ae7-47b3-bd6a-991c444af447\" (UID: \"60e7995e-9ae7-47b3-bd6a-991c444af447\") "
Dec 05 11:46:03 crc kubenswrapper[4728]: I1205 11:46:03.709754 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-928t6\" (UniqueName: \"kubernetes.io/projected/60e7995e-9ae7-47b3-bd6a-991c444af447-kube-api-access-928t6\") pod \"60e7995e-9ae7-47b3-bd6a-991c444af447\" (UID: \"60e7995e-9ae7-47b3-bd6a-991c444af447\") "
Dec 05 11:46:03 crc kubenswrapper[4728]: I1205 11:46:03.709898 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/60e7995e-9ae7-47b3-bd6a-991c444af447-inventory\") pod \"60e7995e-9ae7-47b3-bd6a-991c444af447\" (UID: \"60e7995e-9ae7-47b3-bd6a-991c444af447\") "
Dec 05 11:46:03 crc kubenswrapper[4728]: I1205 11:46:03.709923 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/60e7995e-9ae7-47b3-bd6a-991c444af447-ssh-key\") pod \"60e7995e-9ae7-47b3-bd6a-991c444af447\" (UID: \"60e7995e-9ae7-47b3-bd6a-991c444af447\") "
Dec 05 11:46:03 crc kubenswrapper[4728]: I1205 11:46:03.709950 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60e7995e-9ae7-47b3-bd6a-991c444af447-neutron-metadata-combined-ca-bundle\") pod \"60e7995e-9ae7-47b3-bd6a-991c444af447\" (UID: \"60e7995e-9ae7-47b3-bd6a-991c444af447\") "
Dec 05 11:46:03 crc kubenswrapper[4728]: I1205 11:46:03.709996 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/60e7995e-9ae7-47b3-bd6a-991c444af447-nova-metadata-neutron-config-0\") pod \"60e7995e-9ae7-47b3-bd6a-991c444af447\" (UID: \"60e7995e-9ae7-47b3-bd6a-991c444af447\") "
Dec 05 11:46:03 crc kubenswrapper[4728]: I1205 11:46:03.715353 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/60e7995e-9ae7-47b3-bd6a-991c444af447-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "60e7995e-9ae7-47b3-bd6a-991c444af447" (UID: "60e7995e-9ae7-47b3-bd6a-991c444af447"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:46:03 crc kubenswrapper[4728]: I1205 11:46:03.716301 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/60e7995e-9ae7-47b3-bd6a-991c444af447-kube-api-access-928t6" (OuterVolumeSpecName: "kube-api-access-928t6") pod "60e7995e-9ae7-47b3-bd6a-991c444af447" (UID: "60e7995e-9ae7-47b3-bd6a-991c444af447"). InnerVolumeSpecName "kube-api-access-928t6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:46:03 crc kubenswrapper[4728]: I1205 11:46:03.747492 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/60e7995e-9ae7-47b3-bd6a-991c444af447-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "60e7995e-9ae7-47b3-bd6a-991c444af447" (UID: "60e7995e-9ae7-47b3-bd6a-991c444af447"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:46:03 crc kubenswrapper[4728]: I1205 11:46:03.754117 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/60e7995e-9ae7-47b3-bd6a-991c444af447-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "60e7995e-9ae7-47b3-bd6a-991c444af447" (UID: "60e7995e-9ae7-47b3-bd6a-991c444af447"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:46:03 crc kubenswrapper[4728]: I1205 11:46:03.759163 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/60e7995e-9ae7-47b3-bd6a-991c444af447-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "60e7995e-9ae7-47b3-bd6a-991c444af447" (UID: "60e7995e-9ae7-47b3-bd6a-991c444af447"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:46:03 crc kubenswrapper[4728]: I1205 11:46:03.763982 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/60e7995e-9ae7-47b3-bd6a-991c444af447-inventory" (OuterVolumeSpecName: "inventory") pod "60e7995e-9ae7-47b3-bd6a-991c444af447" (UID: "60e7995e-9ae7-47b3-bd6a-991c444af447"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:46:03 crc kubenswrapper[4728]: I1205 11:46:03.816213 4728 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/60e7995e-9ae7-47b3-bd6a-991c444af447-inventory\") on node \"crc\" DevicePath \"\""
Dec 05 11:46:03 crc kubenswrapper[4728]: I1205 11:46:03.816245 4728 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/60e7995e-9ae7-47b3-bd6a-991c444af447-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 05 11:46:03 crc kubenswrapper[4728]: I1205 11:46:03.816255 4728 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60e7995e-9ae7-47b3-bd6a-991c444af447-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 11:46:03 crc kubenswrapper[4728]: I1205 11:46:03.816265 4728 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/60e7995e-9ae7-47b3-bd6a-991c444af447-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\""
Dec 05 11:46:03 crc kubenswrapper[4728]: I1205 11:46:03.816277 4728 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/60e7995e-9ae7-47b3-bd6a-991c444af447-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\""
Dec 05 11:46:03 crc kubenswrapper[4728]: I1205 11:46:03.816290 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-928t6\" (UniqueName: \"kubernetes.io/projected/60e7995e-9ae7-47b3-bd6a-991c444af447-kube-api-access-928t6\") on node \"crc\" DevicePath \"\""
Dec 05 11:46:04 crc kubenswrapper[4728]: I1205 11:46:04.070619 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-clmjj" event={"ID":"60e7995e-9ae7-47b3-bd6a-991c444af447","Type":"ContainerDied","Data":"ce1bcf9b2f4bb7a1c4498a69f277be2d68cfbb3166571777afed6cc19fec3aff"}
Dec 05 11:46:04 crc kubenswrapper[4728]: I1205 11:46:04.070679 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ce1bcf9b2f4bb7a1c4498a69f277be2d68cfbb3166571777afed6cc19fec3aff"
Dec 05 11:46:04 crc kubenswrapper[4728]: I1205 11:46:04.070762 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-clmjj"
Dec 05 11:46:04 crc kubenswrapper[4728]: I1205 11:46:04.179752 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-h792n"]
Dec 05 11:46:04 crc kubenswrapper[4728]: E1205 11:46:04.180408 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60e7995e-9ae7-47b3-bd6a-991c444af447" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam"
Dec 05 11:46:04 crc kubenswrapper[4728]: I1205 11:46:04.180479 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="60e7995e-9ae7-47b3-bd6a-991c444af447" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam"
Dec 05 11:46:04 crc kubenswrapper[4728]: I1205 11:46:04.180702 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="60e7995e-9ae7-47b3-bd6a-991c444af447" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam"
Dec 05 11:46:04 crc kubenswrapper[4728]: I1205 11:46:04.220333 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-h792n"
Dec 05 11:46:04 crc kubenswrapper[4728]: I1205 11:46:04.223613 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b4kzh"
Dec 05 11:46:04 crc kubenswrapper[4728]: I1205 11:46:04.224619 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 05 11:46:04 crc kubenswrapper[4728]: I1205 11:46:04.224943 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 05 11:46:04 crc kubenswrapper[4728]: I1205 11:46:04.225564 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 05 11:46:04 crc kubenswrapper[4728]: I1205 11:46:04.226486 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret"
Dec 05 11:46:04 crc kubenswrapper[4728]: I1205 11:46:04.255237 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-h792n"]
Dec 05 11:46:04 crc kubenswrapper[4728]: I1205 11:46:04.325437 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/21cac74f-ba27-4db1-9cbe-6189f230e514-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-h792n\" (UID: \"21cac74f-ba27-4db1-9cbe-6189f230e514\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-h792n"
Dec 05 11:46:04 crc kubenswrapper[4728]: I1205 11:46:04.325649 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/21cac74f-ba27-4db1-9cbe-6189f230e514-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-h792n\" (UID: \"21cac74f-ba27-4db1-9cbe-6189f230e514\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-h792n"
Dec 05 11:46:04 crc kubenswrapper[4728]: I1205 11:46:04.325727 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21cac74f-ba27-4db1-9cbe-6189f230e514-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-h792n\" (UID: \"21cac74f-ba27-4db1-9cbe-6189f230e514\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-h792n"
Dec 05 11:46:04 crc kubenswrapper[4728]: I1205 11:46:04.325955 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vgvw9\" (UniqueName: \"kubernetes.io/projected/21cac74f-ba27-4db1-9cbe-6189f230e514-kube-api-access-vgvw9\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-h792n\" (UID: \"21cac74f-ba27-4db1-9cbe-6189f230e514\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-h792n"
Dec 05 11:46:04 crc kubenswrapper[4728]: I1205 11:46:04.326259 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/21cac74f-ba27-4db1-9cbe-6189f230e514-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-h792n\" (UID: \"21cac74f-ba27-4db1-9cbe-6189f230e514\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-h792n"
Dec 05 11:46:04 crc kubenswrapper[4728]: I1205 11:46:04.428274 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/21cac74f-ba27-4db1-9cbe-6189f230e514-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-h792n\" (UID: \"21cac74f-ba27-4db1-9cbe-6189f230e514\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-h792n"
Dec 05 11:46:04 crc kubenswrapper[4728]: I1205 11:46:04.428343 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/21cac74f-ba27-4db1-9cbe-6189f230e514-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-h792n\" (UID: \"21cac74f-ba27-4db1-9cbe-6189f230e514\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-h792n"
Dec 05 11:46:04 crc kubenswrapper[4728]: I1205 11:46:04.428399 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/21cac74f-ba27-4db1-9cbe-6189f230e514-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-h792n\" (UID: \"21cac74f-ba27-4db1-9cbe-6189f230e514\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-h792n"
Dec 05 11:46:04 crc kubenswrapper[4728]: I1205 11:46:04.428427 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21cac74f-ba27-4db1-9cbe-6189f230e514-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-h792n\" (UID: \"21cac74f-ba27-4db1-9cbe-6189f230e514\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-h792n"
Dec 05 11:46:04 crc kubenswrapper[4728]: I1205 11:46:04.428461 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vgvw9\" (UniqueName: \"kubernetes.io/projected/21cac74f-ba27-4db1-9cbe-6189f230e514-kube-api-access-vgvw9\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-h792n\" (UID: \"21cac74f-ba27-4db1-9cbe-6189f230e514\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-h792n"
Dec 05 11:46:04 crc kubenswrapper[4728]: I1205 11:46:04.432623 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21cac74f-ba27-4db1-9cbe-6189f230e514-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-h792n\" (UID: \"21cac74f-ba27-4db1-9cbe-6189f230e514\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-h792n"
Dec 05 11:46:04 crc kubenswrapper[4728]: I1205 11:46:04.432781 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/21cac74f-ba27-4db1-9cbe-6189f230e514-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-h792n\" (UID: \"21cac74f-ba27-4db1-9cbe-6189f230e514\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-h792n"
Dec 05 11:46:04 crc kubenswrapper[4728]: I1205 11:46:04.434339 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/21cac74f-ba27-4db1-9cbe-6189f230e514-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-h792n\" (UID: \"21cac74f-ba27-4db1-9cbe-6189f230e514\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-h792n"
Dec 05 11:46:04 crc kubenswrapper[4728]: I1205 11:46:04.436316 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/21cac74f-ba27-4db1-9cbe-6189f230e514-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-h792n\" (UID: \"21cac74f-ba27-4db1-9cbe-6189f230e514\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-h792n"
Dec 05 11:46:04 crc kubenswrapper[4728]: I1205 11:46:04.443740 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vgvw9\" (UniqueName: \"kubernetes.io/projected/21cac74f-ba27-4db1-9cbe-6189f230e514-kube-api-access-vgvw9\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-h792n\" (UID: \"21cac74f-ba27-4db1-9cbe-6189f230e514\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-h792n"
Dec 05 11:46:04 crc kubenswrapper[4728]: I1205 11:46:04.566394 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-h792n"
Dec 05 11:46:05 crc kubenswrapper[4728]: I1205 11:46:05.135380 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-h792n"]
Dec 05 11:46:06 crc kubenswrapper[4728]: I1205 11:46:06.088108 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-h792n" event={"ID":"21cac74f-ba27-4db1-9cbe-6189f230e514","Type":"ContainerStarted","Data":"60e44c117f8086becfb0cc989de464a7485e717bcb8af285c3b199abf8f1319b"}
Dec 05 11:46:06 crc kubenswrapper[4728]: I1205 11:46:06.088615 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-h792n" event={"ID":"21cac74f-ba27-4db1-9cbe-6189f230e514","Type":"ContainerStarted","Data":"f5688d72899dc206f7f6cfc7f4d81cf4fcc11cac12486520e1a0e86751cf9114"}
Dec 05 11:46:06 crc kubenswrapper[4728]: I1205 11:46:06.107294 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-h792n" podStartSLOduration=1.491056133 podStartE2EDuration="2.107268372s" podCreationTimestamp="2025-12-05 11:46:04 +0000 UTC" firstStartedPulling="2025-12-05 11:46:05.152223335 +0000 UTC m=+2299.294346028" lastFinishedPulling="2025-12-05 11:46:05.768435574 +0000 UTC m=+2299.910558267" observedRunningTime="2025-12-05 11:46:06.100476507 +0000 UTC m=+2300.242599200" watchObservedRunningTime="2025-12-05 11:46:06.107268372 +0000 UTC m=+2300.249391075"
Dec 05 11:46:25 crc kubenswrapper[4728]: I1205 11:46:25.701720 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 11:46:25 crc kubenswrapper[4728]: I1205 11:46:25.703198 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 11:46:55 crc kubenswrapper[4728]: I1205 11:46:55.702037 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 11:46:55 crc kubenswrapper[4728]: I1205 11:46:55.702644 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 11:47:25 crc kubenswrapper[4728]: I1205 11:47:25.702509 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 11:47:25 crc kubenswrapper[4728]: I1205 11:47:25.703167 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 11:47:25 crc kubenswrapper[4728]: I1205 11:47:25.703220 4728 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp"
Dec 05 11:47:25 crc kubenswrapper[4728]: I1205 11:47:25.704096 4728 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2d07033b290f931a3661668c97d1daf60b1b0020e8132f72469f3cf338c1768e"} pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 05 11:47:25 crc kubenswrapper[4728]: I1205 11:47:25.704163 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" containerID="cri-o://2d07033b290f931a3661668c97d1daf60b1b0020e8132f72469f3cf338c1768e" gracePeriod=600
Dec 05 11:47:25 crc kubenswrapper[4728]: E1205 11:47:25.822894 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d"
Dec 05 11:47:25 crc kubenswrapper[4728]: I1205 11:47:25.889271 4728 generic.go:334] "Generic (PLEG): container finished" podID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerID="2d07033b290f931a3661668c97d1daf60b1b0020e8132f72469f3cf338c1768e" exitCode=0
Dec 05 11:47:25 crc kubenswrapper[4728]: I1205 11:47:25.889323 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" event={"ID":"95bfa60b-fcb6-4519-abc5-c25fea50921d","Type":"ContainerDied","Data":"2d07033b290f931a3661668c97d1daf60b1b0020e8132f72469f3cf338c1768e"}
Dec 05 11:47:25 crc kubenswrapper[4728]: I1205 11:47:25.889371 4728 scope.go:117] "RemoveContainer" containerID="d840a321c62d9c3d3f8b0627cf00f77cef26db26853951536c13412819ea8b84"
Dec 05 11:47:25 crc kubenswrapper[4728]: I1205 11:47:25.890091 4728 scope.go:117] "RemoveContainer" containerID="2d07033b290f931a3661668c97d1daf60b1b0020e8132f72469f3cf338c1768e"
Dec 05 11:47:25 crc kubenswrapper[4728]: E1205 11:47:25.890438 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d"
Dec 05 11:47:37 crc kubenswrapper[4728]: I1205 11:47:37.353419 4728 scope.go:117] "RemoveContainer" containerID="2d07033b290f931a3661668c97d1daf60b1b0020e8132f72469f3cf338c1768e"
Dec 05 11:47:37 crc kubenswrapper[4728]: E1205 11:47:37.354555 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d"
Dec 05 11:47:51 crc kubenswrapper[4728]: I1205 11:47:51.352620 4728 scope.go:117] "RemoveContainer" containerID="2d07033b290f931a3661668c97d1daf60b1b0020e8132f72469f3cf338c1768e"
Dec 05 11:47:51 crc kubenswrapper[4728]: E1205 11:47:51.353222 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d"
Dec 05 11:48:02 crc kubenswrapper[4728]: I1205 11:48:02.353095 4728 scope.go:117] "RemoveContainer" containerID="2d07033b290f931a3661668c97d1daf60b1b0020e8132f72469f3cf338c1768e"
Dec 05 11:48:02 crc kubenswrapper[4728]: E1205 11:48:02.354221 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d"
Dec 05 11:48:13 crc kubenswrapper[4728]: I1205 11:48:13.352081 4728 scope.go:117] "RemoveContainer" containerID="2d07033b290f931a3661668c97d1daf60b1b0020e8132f72469f3cf338c1768e"
Dec 05 11:48:13 crc kubenswrapper[4728]: E1205 11:48:13.352868 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d"
Dec 05 11:48:25 crc kubenswrapper[4728]: I1205 11:48:25.352720 4728 scope.go:117] "RemoveContainer" containerID="2d07033b290f931a3661668c97d1daf60b1b0020e8132f72469f3cf338c1768e"
Dec 05 11:48:25 crc kubenswrapper[4728]: E1205 11:48:25.353754 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d"
Dec 05 11:48:40 crc kubenswrapper[4728]: I1205 11:48:40.356107 4728 scope.go:117] "RemoveContainer" containerID="2d07033b290f931a3661668c97d1daf60b1b0020e8132f72469f3cf338c1768e"
Dec 05 11:48:40 crc kubenswrapper[4728]: E1205 11:48:40.357262 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d"
Dec 05 11:48:54 crc kubenswrapper[4728]: I1205 11:48:54.352545 4728 scope.go:117] "RemoveContainer" containerID="2d07033b290f931a3661668c97d1daf60b1b0020e8132f72469f3cf338c1768e"
Dec 05 11:48:54 crc kubenswrapper[4728]: E1205 11:48:54.353914 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d"
Dec 05 11:49:08 crc kubenswrapper[4728]: I1205 11:49:08.352029 4728 scope.go:117] "RemoveContainer" containerID="2d07033b290f931a3661668c97d1daf60b1b0020e8132f72469f3cf338c1768e"
Dec 05 11:49:08 crc kubenswrapper[4728]: E1205 11:49:08.352831 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d"
Dec 05 11:49:20 crc kubenswrapper[4728]: I1205 11:49:20.352305 4728 scope.go:117] "RemoveContainer" containerID="2d07033b290f931a3661668c97d1daf60b1b0020e8132f72469f3cf338c1768e"
Dec 05 11:49:20 crc kubenswrapper[4728]: E1205 11:49:20.353241 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d"
Dec 05 11:49:31 crc kubenswrapper[4728]: I1205 11:49:31.355783 4728 scope.go:117] "RemoveContainer" containerID="2d07033b290f931a3661668c97d1daf60b1b0020e8132f72469f3cf338c1768e"
Dec 05 11:49:31 crc kubenswrapper[4728]: E1205 11:49:31.361271 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\""
pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 11:49:43 crc kubenswrapper[4728]: I1205 11:49:43.351859 4728 scope.go:117] "RemoveContainer" containerID="2d07033b290f931a3661668c97d1daf60b1b0020e8132f72469f3cf338c1768e" Dec 05 11:49:43 crc kubenswrapper[4728]: E1205 11:49:43.352767 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 11:49:46 crc kubenswrapper[4728]: I1205 11:49:46.236661 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-cvg24"] Dec 05 11:49:46 crc kubenswrapper[4728]: I1205 11:49:46.240610 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-cvg24" Dec 05 11:49:46 crc kubenswrapper[4728]: I1205 11:49:46.250083 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-cvg24"] Dec 05 11:49:46 crc kubenswrapper[4728]: I1205 11:49:46.374066 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8fc4df4d-594c-40ff-8dcf-231c808baa24-catalog-content\") pod \"certified-operators-cvg24\" (UID: \"8fc4df4d-594c-40ff-8dcf-231c808baa24\") " pod="openshift-marketplace/certified-operators-cvg24" Dec 05 11:49:46 crc kubenswrapper[4728]: I1205 11:49:46.374598 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6k4sr\" (UniqueName: \"kubernetes.io/projected/8fc4df4d-594c-40ff-8dcf-231c808baa24-kube-api-access-6k4sr\") pod \"certified-operators-cvg24\" (UID: \"8fc4df4d-594c-40ff-8dcf-231c808baa24\") " pod="openshift-marketplace/certified-operators-cvg24" Dec 05 11:49:46 crc kubenswrapper[4728]: I1205 11:49:46.374718 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8fc4df4d-594c-40ff-8dcf-231c808baa24-utilities\") pod \"certified-operators-cvg24\" (UID: \"8fc4df4d-594c-40ff-8dcf-231c808baa24\") " pod="openshift-marketplace/certified-operators-cvg24" Dec 05 11:49:46 crc kubenswrapper[4728]: I1205 11:49:46.476927 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6k4sr\" (UniqueName: \"kubernetes.io/projected/8fc4df4d-594c-40ff-8dcf-231c808baa24-kube-api-access-6k4sr\") pod \"certified-operators-cvg24\" (UID: \"8fc4df4d-594c-40ff-8dcf-231c808baa24\") " pod="openshift-marketplace/certified-operators-cvg24" Dec 05 11:49:46 crc kubenswrapper[4728]: I1205 11:49:46.477003 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8fc4df4d-594c-40ff-8dcf-231c808baa24-utilities\") pod \"certified-operators-cvg24\" (UID: \"8fc4df4d-594c-40ff-8dcf-231c808baa24\") " pod="openshift-marketplace/certified-operators-cvg24" Dec 05 11:49:46 crc kubenswrapper[4728]: I1205 11:49:46.477694 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/8fc4df4d-594c-40ff-8dcf-231c808baa24-utilities\") pod \"certified-operators-cvg24\" (UID: \"8fc4df4d-594c-40ff-8dcf-231c808baa24\") " pod="openshift-marketplace/certified-operators-cvg24" Dec 05 11:49:46 crc kubenswrapper[4728]: I1205 11:49:46.477840 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8fc4df4d-594c-40ff-8dcf-231c808baa24-catalog-content\") pod \"certified-operators-cvg24\" (UID: \"8fc4df4d-594c-40ff-8dcf-231c808baa24\") " pod="openshift-marketplace/certified-operators-cvg24" Dec 05 11:49:46 crc kubenswrapper[4728]: I1205 11:49:46.478070 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8fc4df4d-594c-40ff-8dcf-231c808baa24-catalog-content\") pod \"certified-operators-cvg24\" (UID: \"8fc4df4d-594c-40ff-8dcf-231c808baa24\") " pod="openshift-marketplace/certified-operators-cvg24" Dec 05 11:49:46 crc kubenswrapper[4728]: I1205 11:49:46.508675 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6k4sr\" (UniqueName: \"kubernetes.io/projected/8fc4df4d-594c-40ff-8dcf-231c808baa24-kube-api-access-6k4sr\") pod \"certified-operators-cvg24\" (UID: \"8fc4df4d-594c-40ff-8dcf-231c808baa24\") " pod="openshift-marketplace/certified-operators-cvg24" Dec 05 11:49:46 crc kubenswrapper[4728]: I1205 11:49:46.563678 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-cvg24" Dec 05 11:49:47 crc kubenswrapper[4728]: I1205 11:49:47.048689 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-cvg24"] Dec 05 11:49:47 crc kubenswrapper[4728]: I1205 11:49:47.352015 4728 generic.go:334] "Generic (PLEG): container finished" podID="8fc4df4d-594c-40ff-8dcf-231c808baa24" containerID="a79b2f70bce7b41c2d2bd44de4cf884e7897ad275898aa72565316db4bd1b9a4" exitCode=0 Dec 05 11:49:47 crc kubenswrapper[4728]: I1205 11:49:47.352059 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cvg24" event={"ID":"8fc4df4d-594c-40ff-8dcf-231c808baa24","Type":"ContainerDied","Data":"a79b2f70bce7b41c2d2bd44de4cf884e7897ad275898aa72565316db4bd1b9a4"} Dec 05 11:49:47 crc kubenswrapper[4728]: I1205 11:49:47.352094 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cvg24" event={"ID":"8fc4df4d-594c-40ff-8dcf-231c808baa24","Type":"ContainerStarted","Data":"45f6b698d63e17ec07e6aaa9c78ac5b25b82f86b8bb9397b011653a6bdc83d2a"} Dec 05 11:49:47 crc kubenswrapper[4728]: I1205 11:49:47.354366 4728 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 11:49:48 crc kubenswrapper[4728]: I1205 11:49:48.365883 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cvg24" event={"ID":"8fc4df4d-594c-40ff-8dcf-231c808baa24","Type":"ContainerStarted","Data":"5e74ef50104df0542d53f5c3f2833ddcd8d22a4c2c5fd7ba678d97733a18d441"} Dec 05 11:49:48 crc kubenswrapper[4728]: I1205 11:49:48.433570 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-wqqwt"] Dec 05 11:49:48 crc kubenswrapper[4728]: I1205 11:49:48.435875 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-wqqwt" Dec 05 11:49:48 crc kubenswrapper[4728]: I1205 11:49:48.459190 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wqqwt"] Dec 05 11:49:48 crc kubenswrapper[4728]: I1205 11:49:48.517716 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12a742d2-5e93-4495-a5b6-9e95f2f5ae5a-catalog-content\") pod \"community-operators-wqqwt\" (UID: \"12a742d2-5e93-4495-a5b6-9e95f2f5ae5a\") " pod="openshift-marketplace/community-operators-wqqwt" Dec 05 11:49:48 crc kubenswrapper[4728]: I1205 11:49:48.517822 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l6ccr\" (UniqueName: \"kubernetes.io/projected/12a742d2-5e93-4495-a5b6-9e95f2f5ae5a-kube-api-access-l6ccr\") pod \"community-operators-wqqwt\" (UID: \"12a742d2-5e93-4495-a5b6-9e95f2f5ae5a\") " pod="openshift-marketplace/community-operators-wqqwt" Dec 05 11:49:48 crc kubenswrapper[4728]: I1205 11:49:48.517879 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12a742d2-5e93-4495-a5b6-9e95f2f5ae5a-utilities\") pod \"community-operators-wqqwt\" (UID: \"12a742d2-5e93-4495-a5b6-9e95f2f5ae5a\") " pod="openshift-marketplace/community-operators-wqqwt" Dec 05 11:49:48 crc kubenswrapper[4728]: I1205 11:49:48.619554 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l6ccr\" (UniqueName: \"kubernetes.io/projected/12a742d2-5e93-4495-a5b6-9e95f2f5ae5a-kube-api-access-l6ccr\") pod \"community-operators-wqqwt\" (UID: \"12a742d2-5e93-4495-a5b6-9e95f2f5ae5a\") " pod="openshift-marketplace/community-operators-wqqwt" Dec 05 11:49:48 crc kubenswrapper[4728]: I1205 11:49:48.619686 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12a742d2-5e93-4495-a5b6-9e95f2f5ae5a-utilities\") pod \"community-operators-wqqwt\" (UID: \"12a742d2-5e93-4495-a5b6-9e95f2f5ae5a\") " pod="openshift-marketplace/community-operators-wqqwt" Dec 05 11:49:48 crc kubenswrapper[4728]: I1205 11:49:48.620210 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12a742d2-5e93-4495-a5b6-9e95f2f5ae5a-utilities\") pod \"community-operators-wqqwt\" (UID: \"12a742d2-5e93-4495-a5b6-9e95f2f5ae5a\") " pod="openshift-marketplace/community-operators-wqqwt" Dec 05 11:49:48 crc kubenswrapper[4728]: I1205 11:49:48.621071 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12a742d2-5e93-4495-a5b6-9e95f2f5ae5a-catalog-content\") pod \"community-operators-wqqwt\" (UID: \"12a742d2-5e93-4495-a5b6-9e95f2f5ae5a\") " pod="openshift-marketplace/community-operators-wqqwt" Dec 05 11:49:48 crc kubenswrapper[4728]: I1205 11:49:48.621453 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12a742d2-5e93-4495-a5b6-9e95f2f5ae5a-catalog-content\") pod \"community-operators-wqqwt\" (UID: \"12a742d2-5e93-4495-a5b6-9e95f2f5ae5a\") " pod="openshift-marketplace/community-operators-wqqwt" Dec 05 11:49:48 crc kubenswrapper[4728]: I1205 11:49:48.659289 4728 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-l6ccr\" (UniqueName: \"kubernetes.io/projected/12a742d2-5e93-4495-a5b6-9e95f2f5ae5a-kube-api-access-l6ccr\") pod \"community-operators-wqqwt\" (UID: \"12a742d2-5e93-4495-a5b6-9e95f2f5ae5a\") " pod="openshift-marketplace/community-operators-wqqwt" Dec 05 11:49:48 crc kubenswrapper[4728]: I1205 11:49:48.766166 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wqqwt" Dec 05 11:49:49 crc kubenswrapper[4728]: I1205 11:49:49.298236 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wqqwt"] Dec 05 11:49:49 crc kubenswrapper[4728]: W1205 11:49:49.300833 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod12a742d2_5e93_4495_a5b6_9e95f2f5ae5a.slice/crio-2ffabf8909d582aa29efbbf60ff21e43a40f9f88f2a07e3b8380d7629eb6bb51 WatchSource:0}: Error finding container 2ffabf8909d582aa29efbbf60ff21e43a40f9f88f2a07e3b8380d7629eb6bb51: Status 404 returned error can't find the container with id 2ffabf8909d582aa29efbbf60ff21e43a40f9f88f2a07e3b8380d7629eb6bb51 Dec 05 11:49:49 crc kubenswrapper[4728]: I1205 11:49:49.373209 4728 generic.go:334] "Generic (PLEG): container finished" podID="8fc4df4d-594c-40ff-8dcf-231c808baa24" containerID="5e74ef50104df0542d53f5c3f2833ddcd8d22a4c2c5fd7ba678d97733a18d441" exitCode=0 Dec 05 11:49:49 crc kubenswrapper[4728]: I1205 11:49:49.373298 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cvg24" event={"ID":"8fc4df4d-594c-40ff-8dcf-231c808baa24","Type":"ContainerDied","Data":"5e74ef50104df0542d53f5c3f2833ddcd8d22a4c2c5fd7ba678d97733a18d441"} Dec 05 11:49:49 crc kubenswrapper[4728]: I1205 11:49:49.374500 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wqqwt" event={"ID":"12a742d2-5e93-4495-a5b6-9e95f2f5ae5a","Type":"ContainerStarted","Data":"2ffabf8909d582aa29efbbf60ff21e43a40f9f88f2a07e3b8380d7629eb6bb51"} Dec 05 11:49:49 crc kubenswrapper[4728]: I1205 11:49:49.437256 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-4qkwt"] Dec 05 11:49:49 crc kubenswrapper[4728]: I1205 11:49:49.439779 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4qkwt" Dec 05 11:49:49 crc kubenswrapper[4728]: I1205 11:49:49.447827 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4qkwt"] Dec 05 11:49:49 crc kubenswrapper[4728]: I1205 11:49:49.549782 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5201134-74f2-4ad3-af57-e4fe0001fc4f-utilities\") pod \"redhat-marketplace-4qkwt\" (UID: \"b5201134-74f2-4ad3-af57-e4fe0001fc4f\") " pod="openshift-marketplace/redhat-marketplace-4qkwt" Dec 05 11:49:49 crc kubenswrapper[4728]: I1205 11:49:49.549885 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hdnn2\" (UniqueName: \"kubernetes.io/projected/b5201134-74f2-4ad3-af57-e4fe0001fc4f-kube-api-access-hdnn2\") pod \"redhat-marketplace-4qkwt\" (UID: \"b5201134-74f2-4ad3-af57-e4fe0001fc4f\") " pod="openshift-marketplace/redhat-marketplace-4qkwt" Dec 05 11:49:49 crc kubenswrapper[4728]: I1205 11:49:49.550469 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5201134-74f2-4ad3-af57-e4fe0001fc4f-catalog-content\") pod \"redhat-marketplace-4qkwt\" (UID: \"b5201134-74f2-4ad3-af57-e4fe0001fc4f\") " pod="openshift-marketplace/redhat-marketplace-4qkwt" Dec 05 11:49:49 crc kubenswrapper[4728]: I1205 11:49:49.652615 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hdnn2\" (UniqueName: \"kubernetes.io/projected/b5201134-74f2-4ad3-af57-e4fe0001fc4f-kube-api-access-hdnn2\") pod \"redhat-marketplace-4qkwt\" (UID: \"b5201134-74f2-4ad3-af57-e4fe0001fc4f\") " pod="openshift-marketplace/redhat-marketplace-4qkwt" Dec 05 11:49:49 crc kubenswrapper[4728]: I1205 11:49:49.652875 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5201134-74f2-4ad3-af57-e4fe0001fc4f-catalog-content\") pod \"redhat-marketplace-4qkwt\" (UID: \"b5201134-74f2-4ad3-af57-e4fe0001fc4f\") " pod="openshift-marketplace/redhat-marketplace-4qkwt" Dec 05 11:49:49 crc kubenswrapper[4728]: I1205 11:49:49.652924 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5201134-74f2-4ad3-af57-e4fe0001fc4f-utilities\") pod \"redhat-marketplace-4qkwt\" (UID: \"b5201134-74f2-4ad3-af57-e4fe0001fc4f\") " pod="openshift-marketplace/redhat-marketplace-4qkwt" Dec 05 11:49:49 crc kubenswrapper[4728]: I1205 11:49:49.653445 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5201134-74f2-4ad3-af57-e4fe0001fc4f-catalog-content\") pod \"redhat-marketplace-4qkwt\" (UID: \"b5201134-74f2-4ad3-af57-e4fe0001fc4f\") " pod="openshift-marketplace/redhat-marketplace-4qkwt" Dec 05 11:49:49 crc kubenswrapper[4728]: I1205 11:49:49.653495 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5201134-74f2-4ad3-af57-e4fe0001fc4f-utilities\") pod \"redhat-marketplace-4qkwt\" (UID: \"b5201134-74f2-4ad3-af57-e4fe0001fc4f\") " pod="openshift-marketplace/redhat-marketplace-4qkwt" Dec 05 11:49:49 crc kubenswrapper[4728]: I1205 11:49:49.672375 4728 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-hdnn2\" (UniqueName: \"kubernetes.io/projected/b5201134-74f2-4ad3-af57-e4fe0001fc4f-kube-api-access-hdnn2\") pod \"redhat-marketplace-4qkwt\" (UID: \"b5201134-74f2-4ad3-af57-e4fe0001fc4f\") " pod="openshift-marketplace/redhat-marketplace-4qkwt" Dec 05 11:49:49 crc kubenswrapper[4728]: I1205 11:49:49.767011 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4qkwt" Dec 05 11:49:50 crc kubenswrapper[4728]: W1205 11:49:50.286436 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb5201134_74f2_4ad3_af57_e4fe0001fc4f.slice/crio-ecdc2eb13e1ef8130c7c634241fce1a3452b22a31d99a2629acbe9156e7a3bae WatchSource:0}: Error finding container ecdc2eb13e1ef8130c7c634241fce1a3452b22a31d99a2629acbe9156e7a3bae: Status 404 returned error can't find the container with id ecdc2eb13e1ef8130c7c634241fce1a3452b22a31d99a2629acbe9156e7a3bae Dec 05 11:49:50 crc kubenswrapper[4728]: I1205 11:49:50.288732 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4qkwt"] Dec 05 11:49:50 crc kubenswrapper[4728]: I1205 11:49:50.388530 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cvg24" event={"ID":"8fc4df4d-594c-40ff-8dcf-231c808baa24","Type":"ContainerStarted","Data":"375dbe18c149dddc1246f4500ef862312f00d0b6e48bec5ecbc74f798f1fba01"} Dec 05 11:49:50 crc kubenswrapper[4728]: I1205 11:49:50.391587 4728 generic.go:334] "Generic (PLEG): container finished" podID="12a742d2-5e93-4495-a5b6-9e95f2f5ae5a" containerID="9ea306e2c27561a9964a82216f31e485cb31802ce11a39a77de3186790cc777c" exitCode=0 Dec 05 11:49:50 crc kubenswrapper[4728]: I1205 11:49:50.391676 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wqqwt" event={"ID":"12a742d2-5e93-4495-a5b6-9e95f2f5ae5a","Type":"ContainerDied","Data":"9ea306e2c27561a9964a82216f31e485cb31802ce11a39a77de3186790cc777c"} Dec 05 11:49:50 crc kubenswrapper[4728]: I1205 11:49:50.394181 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4qkwt" event={"ID":"b5201134-74f2-4ad3-af57-e4fe0001fc4f","Type":"ContainerStarted","Data":"ecdc2eb13e1ef8130c7c634241fce1a3452b22a31d99a2629acbe9156e7a3bae"} Dec 05 11:49:50 crc kubenswrapper[4728]: I1205 11:49:50.414528 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-cvg24" podStartSLOduration=1.9210615070000001 podStartE2EDuration="4.414509193s" podCreationTimestamp="2025-12-05 11:49:46 +0000 UTC" firstStartedPulling="2025-12-05 11:49:47.354135125 +0000 UTC m=+2521.496257818" lastFinishedPulling="2025-12-05 11:49:49.847582811 +0000 UTC m=+2523.989705504" observedRunningTime="2025-12-05 11:49:50.406627182 +0000 UTC m=+2524.548749895" watchObservedRunningTime="2025-12-05 11:49:50.414509193 +0000 UTC m=+2524.556631886" Dec 05 11:49:51 crc kubenswrapper[4728]: I1205 11:49:51.404710 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wqqwt" event={"ID":"12a742d2-5e93-4495-a5b6-9e95f2f5ae5a","Type":"ContainerStarted","Data":"5e71ce6203ef5b11329852b71d045c7361c8af65d23c1c5957ea56ae342ac743"} Dec 05 11:49:51 crc kubenswrapper[4728]: I1205 11:49:51.406376 4728 generic.go:334] "Generic (PLEG): container finished" podID="b5201134-74f2-4ad3-af57-e4fe0001fc4f" 
containerID="fafb90ee6dc689666061facaa360e6fcdf530f20142aeb604ed13c7650858170" exitCode=0 Dec 05 11:49:51 crc kubenswrapper[4728]: I1205 11:49:51.406476 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4qkwt" event={"ID":"b5201134-74f2-4ad3-af57-e4fe0001fc4f","Type":"ContainerDied","Data":"fafb90ee6dc689666061facaa360e6fcdf530f20142aeb604ed13c7650858170"} Dec 05 11:49:53 crc kubenswrapper[4728]: I1205 11:49:53.425545 4728 generic.go:334] "Generic (PLEG): container finished" podID="12a742d2-5e93-4495-a5b6-9e95f2f5ae5a" containerID="5e71ce6203ef5b11329852b71d045c7361c8af65d23c1c5957ea56ae342ac743" exitCode=0 Dec 05 11:49:53 crc kubenswrapper[4728]: I1205 11:49:53.425574 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wqqwt" event={"ID":"12a742d2-5e93-4495-a5b6-9e95f2f5ae5a","Type":"ContainerDied","Data":"5e71ce6203ef5b11329852b71d045c7361c8af65d23c1c5957ea56ae342ac743"} Dec 05 11:49:54 crc kubenswrapper[4728]: I1205 11:49:54.438567 4728 generic.go:334] "Generic (PLEG): container finished" podID="b5201134-74f2-4ad3-af57-e4fe0001fc4f" containerID="a795fbe9cbdc03cceacf98ec8a949e26ad48e77661e006a1f2db741c8907f17c" exitCode=0 Dec 05 11:49:54 crc kubenswrapper[4728]: I1205 11:49:54.438612 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4qkwt" event={"ID":"b5201134-74f2-4ad3-af57-e4fe0001fc4f","Type":"ContainerDied","Data":"a795fbe9cbdc03cceacf98ec8a949e26ad48e77661e006a1f2db741c8907f17c"} Dec 05 11:49:55 crc kubenswrapper[4728]: I1205 11:49:55.449953 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wqqwt" event={"ID":"12a742d2-5e93-4495-a5b6-9e95f2f5ae5a","Type":"ContainerStarted","Data":"c62cfb560a125c6497910e4fa7c0ef0a6629f49d6d74e4ec60bcef7cab975a33"} Dec 05 11:49:55 crc kubenswrapper[4728]: I1205 11:49:55.452393 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4qkwt" event={"ID":"b5201134-74f2-4ad3-af57-e4fe0001fc4f","Type":"ContainerStarted","Data":"f1357f0bb54662ba0b869346496bccb97b97814c7aec9cbf51686a78a568255a"} Dec 05 11:49:55 crc kubenswrapper[4728]: I1205 11:49:55.484984 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-wqqwt" podStartSLOduration=4.063246237 podStartE2EDuration="7.484963261s" podCreationTimestamp="2025-12-05 11:49:48 +0000 UTC" firstStartedPulling="2025-12-05 11:49:50.394060257 +0000 UTC m=+2524.536182950" lastFinishedPulling="2025-12-05 11:49:53.815777281 +0000 UTC m=+2527.957899974" observedRunningTime="2025-12-05 11:49:55.467844246 +0000 UTC m=+2529.609966979" watchObservedRunningTime="2025-12-05 11:49:55.484963261 +0000 UTC m=+2529.627085974" Dec 05 11:49:55 crc kubenswrapper[4728]: I1205 11:49:55.506619 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-4qkwt" podStartSLOduration=3.098171274 podStartE2EDuration="6.506594406s" podCreationTimestamp="2025-12-05 11:49:49 +0000 UTC" firstStartedPulling="2025-12-05 11:49:51.408436523 +0000 UTC m=+2525.550559236" lastFinishedPulling="2025-12-05 11:49:54.816859675 +0000 UTC m=+2528.958982368" observedRunningTime="2025-12-05 11:49:55.493292423 +0000 UTC m=+2529.635415166" watchObservedRunningTime="2025-12-05 11:49:55.506594406 +0000 UTC m=+2529.648717119" Dec 05 11:49:56 crc kubenswrapper[4728]: I1205 11:49:56.359480 4728 
scope.go:117] "RemoveContainer" containerID="2d07033b290f931a3661668c97d1daf60b1b0020e8132f72469f3cf338c1768e" Dec 05 11:49:56 crc kubenswrapper[4728]: E1205 11:49:56.359871 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 11:49:56 crc kubenswrapper[4728]: I1205 11:49:56.563872 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-cvg24" Dec 05 11:49:56 crc kubenswrapper[4728]: I1205 11:49:56.564221 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-cvg24" Dec 05 11:49:56 crc kubenswrapper[4728]: I1205 11:49:56.616620 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-cvg24" Dec 05 11:49:57 crc kubenswrapper[4728]: I1205 11:49:57.535120 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-cvg24" Dec 05 11:49:58 crc kubenswrapper[4728]: I1205 11:49:58.766524 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-wqqwt" Dec 05 11:49:58 crc kubenswrapper[4728]: I1205 11:49:58.767765 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-wqqwt" Dec 05 11:49:58 crc kubenswrapper[4728]: I1205 11:49:58.816103 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-wqqwt" Dec 05 11:49:59 crc kubenswrapper[4728]: I1205 11:49:59.538425 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-wqqwt" Dec 05 11:49:59 crc kubenswrapper[4728]: I1205 11:49:59.767408 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-4qkwt" Dec 05 11:49:59 crc kubenswrapper[4728]: I1205 11:49:59.767464 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-4qkwt" Dec 05 11:49:59 crc kubenswrapper[4728]: I1205 11:49:59.818985 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-4qkwt" Dec 05 11:49:59 crc kubenswrapper[4728]: I1205 11:49:59.821092 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-cvg24"] Dec 05 11:50:00 crc kubenswrapper[4728]: I1205 11:50:00.515597 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-cvg24" podUID="8fc4df4d-594c-40ff-8dcf-231c808baa24" containerName="registry-server" containerID="cri-o://375dbe18c149dddc1246f4500ef862312f00d0b6e48bec5ecbc74f798f1fba01" gracePeriod=2 Dec 05 11:50:00 crc kubenswrapper[4728]: I1205 11:50:00.581842 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-4qkwt" Dec 05 11:50:01 crc kubenswrapper[4728]: I1205 11:50:01.480903 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-cvg24" Dec 05 11:50:01 crc kubenswrapper[4728]: I1205 11:50:01.544098 4728 generic.go:334] "Generic (PLEG): container finished" podID="8fc4df4d-594c-40ff-8dcf-231c808baa24" containerID="375dbe18c149dddc1246f4500ef862312f00d0b6e48bec5ecbc74f798f1fba01" exitCode=0 Dec 05 11:50:01 crc kubenswrapper[4728]: I1205 11:50:01.544152 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cvg24" event={"ID":"8fc4df4d-594c-40ff-8dcf-231c808baa24","Type":"ContainerDied","Data":"375dbe18c149dddc1246f4500ef862312f00d0b6e48bec5ecbc74f798f1fba01"} Dec 05 11:50:01 crc kubenswrapper[4728]: I1205 11:50:01.544220 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cvg24" event={"ID":"8fc4df4d-594c-40ff-8dcf-231c808baa24","Type":"ContainerDied","Data":"45f6b698d63e17ec07e6aaa9c78ac5b25b82f86b8bb9397b011653a6bdc83d2a"} Dec 05 11:50:01 crc kubenswrapper[4728]: I1205 11:50:01.544184 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-cvg24" Dec 05 11:50:01 crc kubenswrapper[4728]: I1205 11:50:01.544238 4728 scope.go:117] "RemoveContainer" containerID="375dbe18c149dddc1246f4500ef862312f00d0b6e48bec5ecbc74f798f1fba01" Dec 05 11:50:01 crc kubenswrapper[4728]: I1205 11:50:01.577202 4728 scope.go:117] "RemoveContainer" containerID="5e74ef50104df0542d53f5c3f2833ddcd8d22a4c2c5fd7ba678d97733a18d441" Dec 05 11:50:01 crc kubenswrapper[4728]: I1205 11:50:01.586158 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8fc4df4d-594c-40ff-8dcf-231c808baa24-catalog-content\") pod \"8fc4df4d-594c-40ff-8dcf-231c808baa24\" (UID: \"8fc4df4d-594c-40ff-8dcf-231c808baa24\") " Dec 05 11:50:01 crc kubenswrapper[4728]: I1205 11:50:01.586389 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6k4sr\" (UniqueName: \"kubernetes.io/projected/8fc4df4d-594c-40ff-8dcf-231c808baa24-kube-api-access-6k4sr\") pod \"8fc4df4d-594c-40ff-8dcf-231c808baa24\" (UID: \"8fc4df4d-594c-40ff-8dcf-231c808baa24\") " Dec 05 11:50:01 crc kubenswrapper[4728]: I1205 11:50:01.586505 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8fc4df4d-594c-40ff-8dcf-231c808baa24-utilities\") pod \"8fc4df4d-594c-40ff-8dcf-231c808baa24\" (UID: \"8fc4df4d-594c-40ff-8dcf-231c808baa24\") " Dec 05 11:50:01 crc kubenswrapper[4728]: I1205 11:50:01.588373 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8fc4df4d-594c-40ff-8dcf-231c808baa24-utilities" (OuterVolumeSpecName: "utilities") pod "8fc4df4d-594c-40ff-8dcf-231c808baa24" (UID: "8fc4df4d-594c-40ff-8dcf-231c808baa24"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:50:01 crc kubenswrapper[4728]: I1205 11:50:01.598134 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8fc4df4d-594c-40ff-8dcf-231c808baa24-kube-api-access-6k4sr" (OuterVolumeSpecName: "kube-api-access-6k4sr") pod "8fc4df4d-594c-40ff-8dcf-231c808baa24" (UID: "8fc4df4d-594c-40ff-8dcf-231c808baa24"). InnerVolumeSpecName "kube-api-access-6k4sr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:50:01 crc kubenswrapper[4728]: I1205 11:50:01.605358 4728 scope.go:117] "RemoveContainer" containerID="a79b2f70bce7b41c2d2bd44de4cf884e7897ad275898aa72565316db4bd1b9a4" Dec 05 11:50:01 crc kubenswrapper[4728]: I1205 11:50:01.644321 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8fc4df4d-594c-40ff-8dcf-231c808baa24-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8fc4df4d-594c-40ff-8dcf-231c808baa24" (UID: "8fc4df4d-594c-40ff-8dcf-231c808baa24"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:50:01 crc kubenswrapper[4728]: I1205 11:50:01.684390 4728 scope.go:117] "RemoveContainer" containerID="375dbe18c149dddc1246f4500ef862312f00d0b6e48bec5ecbc74f798f1fba01" Dec 05 11:50:01 crc kubenswrapper[4728]: E1205 11:50:01.684896 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"375dbe18c149dddc1246f4500ef862312f00d0b6e48bec5ecbc74f798f1fba01\": container with ID starting with 375dbe18c149dddc1246f4500ef862312f00d0b6e48bec5ecbc74f798f1fba01 not found: ID does not exist" containerID="375dbe18c149dddc1246f4500ef862312f00d0b6e48bec5ecbc74f798f1fba01" Dec 05 11:50:01 crc kubenswrapper[4728]: I1205 11:50:01.684930 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"375dbe18c149dddc1246f4500ef862312f00d0b6e48bec5ecbc74f798f1fba01"} err="failed to get container status \"375dbe18c149dddc1246f4500ef862312f00d0b6e48bec5ecbc74f798f1fba01\": rpc error: code = NotFound desc = could not find container \"375dbe18c149dddc1246f4500ef862312f00d0b6e48bec5ecbc74f798f1fba01\": container with ID starting with 375dbe18c149dddc1246f4500ef862312f00d0b6e48bec5ecbc74f798f1fba01 not found: ID does not exist" Dec 05 11:50:01 crc kubenswrapper[4728]: I1205 11:50:01.684951 4728 scope.go:117] "RemoveContainer" containerID="5e74ef50104df0542d53f5c3f2833ddcd8d22a4c2c5fd7ba678d97733a18d441" Dec 05 11:50:01 crc kubenswrapper[4728]: E1205 11:50:01.685229 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5e74ef50104df0542d53f5c3f2833ddcd8d22a4c2c5fd7ba678d97733a18d441\": container with ID starting with 5e74ef50104df0542d53f5c3f2833ddcd8d22a4c2c5fd7ba678d97733a18d441 not found: ID does not exist" containerID="5e74ef50104df0542d53f5c3f2833ddcd8d22a4c2c5fd7ba678d97733a18d441" Dec 05 11:50:01 crc kubenswrapper[4728]: I1205 11:50:01.685272 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e74ef50104df0542d53f5c3f2833ddcd8d22a4c2c5fd7ba678d97733a18d441"} err="failed to get container status \"5e74ef50104df0542d53f5c3f2833ddcd8d22a4c2c5fd7ba678d97733a18d441\": rpc error: code = NotFound desc = could not find container \"5e74ef50104df0542d53f5c3f2833ddcd8d22a4c2c5fd7ba678d97733a18d441\": container with ID starting with 5e74ef50104df0542d53f5c3f2833ddcd8d22a4c2c5fd7ba678d97733a18d441 not found: ID does not exist" Dec 05 11:50:01 crc kubenswrapper[4728]: I1205 11:50:01.685288 4728 scope.go:117] "RemoveContainer" containerID="a79b2f70bce7b41c2d2bd44de4cf884e7897ad275898aa72565316db4bd1b9a4" Dec 05 11:50:01 crc kubenswrapper[4728]: E1205 11:50:01.685546 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"a79b2f70bce7b41c2d2bd44de4cf884e7897ad275898aa72565316db4bd1b9a4\": container with ID starting with a79b2f70bce7b41c2d2bd44de4cf884e7897ad275898aa72565316db4bd1b9a4 not found: ID does not exist" containerID="a79b2f70bce7b41c2d2bd44de4cf884e7897ad275898aa72565316db4bd1b9a4" Dec 05 11:50:01 crc kubenswrapper[4728]: I1205 11:50:01.685566 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a79b2f70bce7b41c2d2bd44de4cf884e7897ad275898aa72565316db4bd1b9a4"} err="failed to get container status \"a79b2f70bce7b41c2d2bd44de4cf884e7897ad275898aa72565316db4bd1b9a4\": rpc error: code = NotFound desc = could not find container \"a79b2f70bce7b41c2d2bd44de4cf884e7897ad275898aa72565316db4bd1b9a4\": container with ID starting with a79b2f70bce7b41c2d2bd44de4cf884e7897ad275898aa72565316db4bd1b9a4 not found: ID does not exist" Dec 05 11:50:01 crc kubenswrapper[4728]: I1205 11:50:01.689173 4728 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8fc4df4d-594c-40ff-8dcf-231c808baa24-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 11:50:01 crc kubenswrapper[4728]: I1205 11:50:01.689195 4728 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8fc4df4d-594c-40ff-8dcf-231c808baa24-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 11:50:01 crc kubenswrapper[4728]: I1205 11:50:01.689204 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6k4sr\" (UniqueName: \"kubernetes.io/projected/8fc4df4d-594c-40ff-8dcf-231c808baa24-kube-api-access-6k4sr\") on node \"crc\" DevicePath \"\"" Dec 05 11:50:01 crc kubenswrapper[4728]: I1205 11:50:01.881891 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-cvg24"] Dec 05 11:50:01 crc kubenswrapper[4728]: I1205 11:50:01.889854 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-cvg24"] Dec 05 11:50:02 crc kubenswrapper[4728]: I1205 11:50:02.226735 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-wqqwt"] Dec 05 11:50:02 crc kubenswrapper[4728]: I1205 11:50:02.368568 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8fc4df4d-594c-40ff-8dcf-231c808baa24" path="/var/lib/kubelet/pods/8fc4df4d-594c-40ff-8dcf-231c808baa24/volumes" Dec 05 11:50:02 crc kubenswrapper[4728]: I1205 11:50:02.554403 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-wqqwt" podUID="12a742d2-5e93-4495-a5b6-9e95f2f5ae5a" containerName="registry-server" containerID="cri-o://c62cfb560a125c6497910e4fa7c0ef0a6629f49d6d74e4ec60bcef7cab975a33" gracePeriod=2 Dec 05 11:50:02 crc kubenswrapper[4728]: I1205 11:50:02.997557 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-wqqwt" Dec 05 11:50:03 crc kubenswrapper[4728]: I1205 11:50:03.122407 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12a742d2-5e93-4495-a5b6-9e95f2f5ae5a-catalog-content\") pod \"12a742d2-5e93-4495-a5b6-9e95f2f5ae5a\" (UID: \"12a742d2-5e93-4495-a5b6-9e95f2f5ae5a\") " Dec 05 11:50:03 crc kubenswrapper[4728]: I1205 11:50:03.122594 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12a742d2-5e93-4495-a5b6-9e95f2f5ae5a-utilities\") pod \"12a742d2-5e93-4495-a5b6-9e95f2f5ae5a\" (UID: \"12a742d2-5e93-4495-a5b6-9e95f2f5ae5a\") " Dec 05 11:50:03 crc kubenswrapper[4728]: I1205 11:50:03.122720 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l6ccr\" (UniqueName: \"kubernetes.io/projected/12a742d2-5e93-4495-a5b6-9e95f2f5ae5a-kube-api-access-l6ccr\") pod \"12a742d2-5e93-4495-a5b6-9e95f2f5ae5a\" (UID: \"12a742d2-5e93-4495-a5b6-9e95f2f5ae5a\") " Dec 05 11:50:03 crc kubenswrapper[4728]: I1205 11:50:03.123182 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/12a742d2-5e93-4495-a5b6-9e95f2f5ae5a-utilities" (OuterVolumeSpecName: "utilities") pod "12a742d2-5e93-4495-a5b6-9e95f2f5ae5a" (UID: "12a742d2-5e93-4495-a5b6-9e95f2f5ae5a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:50:03 crc kubenswrapper[4728]: I1205 11:50:03.123615 4728 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12a742d2-5e93-4495-a5b6-9e95f2f5ae5a-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 11:50:03 crc kubenswrapper[4728]: I1205 11:50:03.129809 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/12a742d2-5e93-4495-a5b6-9e95f2f5ae5a-kube-api-access-l6ccr" (OuterVolumeSpecName: "kube-api-access-l6ccr") pod "12a742d2-5e93-4495-a5b6-9e95f2f5ae5a" (UID: "12a742d2-5e93-4495-a5b6-9e95f2f5ae5a"). InnerVolumeSpecName "kube-api-access-l6ccr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:50:03 crc kubenswrapper[4728]: I1205 11:50:03.188914 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/12a742d2-5e93-4495-a5b6-9e95f2f5ae5a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "12a742d2-5e93-4495-a5b6-9e95f2f5ae5a" (UID: "12a742d2-5e93-4495-a5b6-9e95f2f5ae5a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:50:03 crc kubenswrapper[4728]: I1205 11:50:03.225436 4728 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12a742d2-5e93-4495-a5b6-9e95f2f5ae5a-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 11:50:03 crc kubenswrapper[4728]: I1205 11:50:03.225477 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l6ccr\" (UniqueName: \"kubernetes.io/projected/12a742d2-5e93-4495-a5b6-9e95f2f5ae5a-kube-api-access-l6ccr\") on node \"crc\" DevicePath \"\"" Dec 05 11:50:03 crc kubenswrapper[4728]: I1205 11:50:03.566203 4728 generic.go:334] "Generic (PLEG): container finished" podID="12a742d2-5e93-4495-a5b6-9e95f2f5ae5a" containerID="c62cfb560a125c6497910e4fa7c0ef0a6629f49d6d74e4ec60bcef7cab975a33" exitCode=0 Dec 05 11:50:03 crc kubenswrapper[4728]: I1205 11:50:03.566237 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wqqwt" event={"ID":"12a742d2-5e93-4495-a5b6-9e95f2f5ae5a","Type":"ContainerDied","Data":"c62cfb560a125c6497910e4fa7c0ef0a6629f49d6d74e4ec60bcef7cab975a33"} Dec 05 11:50:03 crc kubenswrapper[4728]: I1205 11:50:03.566269 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wqqwt" event={"ID":"12a742d2-5e93-4495-a5b6-9e95f2f5ae5a","Type":"ContainerDied","Data":"2ffabf8909d582aa29efbbf60ff21e43a40f9f88f2a07e3b8380d7629eb6bb51"} Dec 05 11:50:03 crc kubenswrapper[4728]: I1205 11:50:03.566287 4728 scope.go:117] "RemoveContainer" containerID="c62cfb560a125c6497910e4fa7c0ef0a6629f49d6d74e4ec60bcef7cab975a33" Dec 05 11:50:03 crc kubenswrapper[4728]: I1205 11:50:03.566284 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-wqqwt" Dec 05 11:50:03 crc kubenswrapper[4728]: I1205 11:50:03.595966 4728 scope.go:117] "RemoveContainer" containerID="5e71ce6203ef5b11329852b71d045c7361c8af65d23c1c5957ea56ae342ac743" Dec 05 11:50:03 crc kubenswrapper[4728]: I1205 11:50:03.600539 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-wqqwt"] Dec 05 11:50:03 crc kubenswrapper[4728]: I1205 11:50:03.610237 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-wqqwt"] Dec 05 11:50:03 crc kubenswrapper[4728]: I1205 11:50:03.630293 4728 scope.go:117] "RemoveContainer" containerID="9ea306e2c27561a9964a82216f31e485cb31802ce11a39a77de3186790cc777c" Dec 05 11:50:03 crc kubenswrapper[4728]: I1205 11:50:03.680707 4728 scope.go:117] "RemoveContainer" containerID="c62cfb560a125c6497910e4fa7c0ef0a6629f49d6d74e4ec60bcef7cab975a33" Dec 05 11:50:03 crc kubenswrapper[4728]: E1205 11:50:03.681171 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c62cfb560a125c6497910e4fa7c0ef0a6629f49d6d74e4ec60bcef7cab975a33\": container with ID starting with c62cfb560a125c6497910e4fa7c0ef0a6629f49d6d74e4ec60bcef7cab975a33 not found: ID does not exist" containerID="c62cfb560a125c6497910e4fa7c0ef0a6629f49d6d74e4ec60bcef7cab975a33" Dec 05 11:50:03 crc kubenswrapper[4728]: I1205 11:50:03.681202 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c62cfb560a125c6497910e4fa7c0ef0a6629f49d6d74e4ec60bcef7cab975a33"} err="failed to get container status \"c62cfb560a125c6497910e4fa7c0ef0a6629f49d6d74e4ec60bcef7cab975a33\": rpc error: code = NotFound desc = could not find container \"c62cfb560a125c6497910e4fa7c0ef0a6629f49d6d74e4ec60bcef7cab975a33\": container with ID starting with c62cfb560a125c6497910e4fa7c0ef0a6629f49d6d74e4ec60bcef7cab975a33 not found: ID does not exist" Dec 05 11:50:03 crc kubenswrapper[4728]: I1205 11:50:03.681222 4728 scope.go:117] "RemoveContainer" containerID="5e71ce6203ef5b11329852b71d045c7361c8af65d23c1c5957ea56ae342ac743" Dec 05 11:50:03 crc kubenswrapper[4728]: E1205 11:50:03.681415 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5e71ce6203ef5b11329852b71d045c7361c8af65d23c1c5957ea56ae342ac743\": container with ID starting with 5e71ce6203ef5b11329852b71d045c7361c8af65d23c1c5957ea56ae342ac743 not found: ID does not exist" containerID="5e71ce6203ef5b11329852b71d045c7361c8af65d23c1c5957ea56ae342ac743" Dec 05 11:50:03 crc kubenswrapper[4728]: I1205 11:50:03.681433 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e71ce6203ef5b11329852b71d045c7361c8af65d23c1c5957ea56ae342ac743"} err="failed to get container status \"5e71ce6203ef5b11329852b71d045c7361c8af65d23c1c5957ea56ae342ac743\": rpc error: code = NotFound desc = could not find container \"5e71ce6203ef5b11329852b71d045c7361c8af65d23c1c5957ea56ae342ac743\": container with ID starting with 5e71ce6203ef5b11329852b71d045c7361c8af65d23c1c5957ea56ae342ac743 not found: ID does not exist" Dec 05 11:50:03 crc kubenswrapper[4728]: I1205 11:50:03.681445 4728 scope.go:117] "RemoveContainer" containerID="9ea306e2c27561a9964a82216f31e485cb31802ce11a39a77de3186790cc777c" Dec 05 11:50:03 crc kubenswrapper[4728]: E1205 11:50:03.681647 4728 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"9ea306e2c27561a9964a82216f31e485cb31802ce11a39a77de3186790cc777c\": container with ID starting with 9ea306e2c27561a9964a82216f31e485cb31802ce11a39a77de3186790cc777c not found: ID does not exist" containerID="9ea306e2c27561a9964a82216f31e485cb31802ce11a39a77de3186790cc777c" Dec 05 11:50:03 crc kubenswrapper[4728]: I1205 11:50:03.681742 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9ea306e2c27561a9964a82216f31e485cb31802ce11a39a77de3186790cc777c"} err="failed to get container status \"9ea306e2c27561a9964a82216f31e485cb31802ce11a39a77de3186790cc777c\": rpc error: code = NotFound desc = could not find container \"9ea306e2c27561a9964a82216f31e485cb31802ce11a39a77de3186790cc777c\": container with ID starting with 9ea306e2c27561a9964a82216f31e485cb31802ce11a39a77de3186790cc777c not found: ID does not exist" Dec 05 11:50:04 crc kubenswrapper[4728]: I1205 11:50:04.385212 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="12a742d2-5e93-4495-a5b6-9e95f2f5ae5a" path="/var/lib/kubelet/pods/12a742d2-5e93-4495-a5b6-9e95f2f5ae5a/volumes" Dec 05 11:50:04 crc kubenswrapper[4728]: I1205 11:50:04.624150 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4qkwt"] Dec 05 11:50:04 crc kubenswrapper[4728]: I1205 11:50:04.624437 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-4qkwt" podUID="b5201134-74f2-4ad3-af57-e4fe0001fc4f" containerName="registry-server" containerID="cri-o://f1357f0bb54662ba0b869346496bccb97b97814c7aec9cbf51686a78a568255a" gracePeriod=2 Dec 05 11:50:05 crc kubenswrapper[4728]: I1205 11:50:05.100943 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4qkwt" Dec 05 11:50:05 crc kubenswrapper[4728]: I1205 11:50:05.165257 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hdnn2\" (UniqueName: \"kubernetes.io/projected/b5201134-74f2-4ad3-af57-e4fe0001fc4f-kube-api-access-hdnn2\") pod \"b5201134-74f2-4ad3-af57-e4fe0001fc4f\" (UID: \"b5201134-74f2-4ad3-af57-e4fe0001fc4f\") " Dec 05 11:50:05 crc kubenswrapper[4728]: I1205 11:50:05.165318 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5201134-74f2-4ad3-af57-e4fe0001fc4f-utilities\") pod \"b5201134-74f2-4ad3-af57-e4fe0001fc4f\" (UID: \"b5201134-74f2-4ad3-af57-e4fe0001fc4f\") " Dec 05 11:50:05 crc kubenswrapper[4728]: I1205 11:50:05.165349 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5201134-74f2-4ad3-af57-e4fe0001fc4f-catalog-content\") pod \"b5201134-74f2-4ad3-af57-e4fe0001fc4f\" (UID: \"b5201134-74f2-4ad3-af57-e4fe0001fc4f\") " Dec 05 11:50:05 crc kubenswrapper[4728]: I1205 11:50:05.166175 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b5201134-74f2-4ad3-af57-e4fe0001fc4f-utilities" (OuterVolumeSpecName: "utilities") pod "b5201134-74f2-4ad3-af57-e4fe0001fc4f" (UID: "b5201134-74f2-4ad3-af57-e4fe0001fc4f"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:50:05 crc kubenswrapper[4728]: I1205 11:50:05.173011 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b5201134-74f2-4ad3-af57-e4fe0001fc4f-kube-api-access-hdnn2" (OuterVolumeSpecName: "kube-api-access-hdnn2") pod "b5201134-74f2-4ad3-af57-e4fe0001fc4f" (UID: "b5201134-74f2-4ad3-af57-e4fe0001fc4f"). InnerVolumeSpecName "kube-api-access-hdnn2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:50:05 crc kubenswrapper[4728]: I1205 11:50:05.181437 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b5201134-74f2-4ad3-af57-e4fe0001fc4f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b5201134-74f2-4ad3-af57-e4fe0001fc4f" (UID: "b5201134-74f2-4ad3-af57-e4fe0001fc4f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:50:05 crc kubenswrapper[4728]: I1205 11:50:05.268170 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hdnn2\" (UniqueName: \"kubernetes.io/projected/b5201134-74f2-4ad3-af57-e4fe0001fc4f-kube-api-access-hdnn2\") on node \"crc\" DevicePath \"\"" Dec 05 11:50:05 crc kubenswrapper[4728]: I1205 11:50:05.268201 4728 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5201134-74f2-4ad3-af57-e4fe0001fc4f-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 11:50:05 crc kubenswrapper[4728]: I1205 11:50:05.268211 4728 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5201134-74f2-4ad3-af57-e4fe0001fc4f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 11:50:05 crc kubenswrapper[4728]: I1205 11:50:05.595464 4728 generic.go:334] "Generic (PLEG): container finished" podID="b5201134-74f2-4ad3-af57-e4fe0001fc4f" containerID="f1357f0bb54662ba0b869346496bccb97b97814c7aec9cbf51686a78a568255a" exitCode=0 Dec 05 11:50:05 crc kubenswrapper[4728]: I1205 11:50:05.595507 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4qkwt" event={"ID":"b5201134-74f2-4ad3-af57-e4fe0001fc4f","Type":"ContainerDied","Data":"f1357f0bb54662ba0b869346496bccb97b97814c7aec9cbf51686a78a568255a"} Dec 05 11:50:05 crc kubenswrapper[4728]: I1205 11:50:05.595565 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4qkwt" event={"ID":"b5201134-74f2-4ad3-af57-e4fe0001fc4f","Type":"ContainerDied","Data":"ecdc2eb13e1ef8130c7c634241fce1a3452b22a31d99a2629acbe9156e7a3bae"} Dec 05 11:50:05 crc kubenswrapper[4728]: I1205 11:50:05.595569 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4qkwt" Dec 05 11:50:05 crc kubenswrapper[4728]: I1205 11:50:05.595588 4728 scope.go:117] "RemoveContainer" containerID="f1357f0bb54662ba0b869346496bccb97b97814c7aec9cbf51686a78a568255a" Dec 05 11:50:05 crc kubenswrapper[4728]: I1205 11:50:05.625423 4728 scope.go:117] "RemoveContainer" containerID="a795fbe9cbdc03cceacf98ec8a949e26ad48e77661e006a1f2db741c8907f17c" Dec 05 11:50:05 crc kubenswrapper[4728]: I1205 11:50:05.641691 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4qkwt"] Dec 05 11:50:05 crc kubenswrapper[4728]: I1205 11:50:05.652335 4728 scope.go:117] "RemoveContainer" containerID="fafb90ee6dc689666061facaa360e6fcdf530f20142aeb604ed13c7650858170" Dec 05 11:50:05 crc kubenswrapper[4728]: I1205 11:50:05.655277 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-4qkwt"] Dec 05 11:50:05 crc kubenswrapper[4728]: I1205 11:50:05.691880 4728 scope.go:117] "RemoveContainer" containerID="f1357f0bb54662ba0b869346496bccb97b97814c7aec9cbf51686a78a568255a" Dec 05 11:50:05 crc kubenswrapper[4728]: E1205 11:50:05.692430 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f1357f0bb54662ba0b869346496bccb97b97814c7aec9cbf51686a78a568255a\": container with ID starting with f1357f0bb54662ba0b869346496bccb97b97814c7aec9cbf51686a78a568255a not found: ID does not exist" containerID="f1357f0bb54662ba0b869346496bccb97b97814c7aec9cbf51686a78a568255a" Dec 05 11:50:05 crc kubenswrapper[4728]: I1205 11:50:05.692482 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f1357f0bb54662ba0b869346496bccb97b97814c7aec9cbf51686a78a568255a"} err="failed to get container status \"f1357f0bb54662ba0b869346496bccb97b97814c7aec9cbf51686a78a568255a\": rpc error: code = NotFound desc = could not find container \"f1357f0bb54662ba0b869346496bccb97b97814c7aec9cbf51686a78a568255a\": container with ID starting with f1357f0bb54662ba0b869346496bccb97b97814c7aec9cbf51686a78a568255a not found: ID does not exist" Dec 05 11:50:05 crc kubenswrapper[4728]: I1205 11:50:05.692516 4728 scope.go:117] "RemoveContainer" containerID="a795fbe9cbdc03cceacf98ec8a949e26ad48e77661e006a1f2db741c8907f17c" Dec 05 11:50:05 crc kubenswrapper[4728]: E1205 11:50:05.693066 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a795fbe9cbdc03cceacf98ec8a949e26ad48e77661e006a1f2db741c8907f17c\": container with ID starting with a795fbe9cbdc03cceacf98ec8a949e26ad48e77661e006a1f2db741c8907f17c not found: ID does not exist" containerID="a795fbe9cbdc03cceacf98ec8a949e26ad48e77661e006a1f2db741c8907f17c" Dec 05 11:50:05 crc kubenswrapper[4728]: I1205 11:50:05.693102 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a795fbe9cbdc03cceacf98ec8a949e26ad48e77661e006a1f2db741c8907f17c"} err="failed to get container status \"a795fbe9cbdc03cceacf98ec8a949e26ad48e77661e006a1f2db741c8907f17c\": rpc error: code = NotFound desc = could not find container \"a795fbe9cbdc03cceacf98ec8a949e26ad48e77661e006a1f2db741c8907f17c\": container with ID starting with a795fbe9cbdc03cceacf98ec8a949e26ad48e77661e006a1f2db741c8907f17c not found: ID does not exist" Dec 05 11:50:05 crc kubenswrapper[4728]: I1205 11:50:05.693124 4728 scope.go:117] "RemoveContainer" 
containerID="fafb90ee6dc689666061facaa360e6fcdf530f20142aeb604ed13c7650858170" Dec 05 11:50:05 crc kubenswrapper[4728]: E1205 11:50:05.693472 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fafb90ee6dc689666061facaa360e6fcdf530f20142aeb604ed13c7650858170\": container with ID starting with fafb90ee6dc689666061facaa360e6fcdf530f20142aeb604ed13c7650858170 not found: ID does not exist" containerID="fafb90ee6dc689666061facaa360e6fcdf530f20142aeb604ed13c7650858170" Dec 05 11:50:05 crc kubenswrapper[4728]: I1205 11:50:05.693514 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fafb90ee6dc689666061facaa360e6fcdf530f20142aeb604ed13c7650858170"} err="failed to get container status \"fafb90ee6dc689666061facaa360e6fcdf530f20142aeb604ed13c7650858170\": rpc error: code = NotFound desc = could not find container \"fafb90ee6dc689666061facaa360e6fcdf530f20142aeb604ed13c7650858170\": container with ID starting with fafb90ee6dc689666061facaa360e6fcdf530f20142aeb604ed13c7650858170 not found: ID does not exist" Dec 05 11:50:06 crc kubenswrapper[4728]: I1205 11:50:06.365351 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b5201134-74f2-4ad3-af57-e4fe0001fc4f" path="/var/lib/kubelet/pods/b5201134-74f2-4ad3-af57-e4fe0001fc4f/volumes" Dec 05 11:50:10 crc kubenswrapper[4728]: I1205 11:50:10.351855 4728 scope.go:117] "RemoveContainer" containerID="2d07033b290f931a3661668c97d1daf60b1b0020e8132f72469f3cf338c1768e" Dec 05 11:50:10 crc kubenswrapper[4728]: E1205 11:50:10.352570 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 11:50:25 crc kubenswrapper[4728]: I1205 11:50:25.352637 4728 scope.go:117] "RemoveContainer" containerID="2d07033b290f931a3661668c97d1daf60b1b0020e8132f72469f3cf338c1768e" Dec 05 11:50:25 crc kubenswrapper[4728]: E1205 11:50:25.353660 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 11:50:25 crc kubenswrapper[4728]: I1205 11:50:25.808861 4728 generic.go:334] "Generic (PLEG): container finished" podID="21cac74f-ba27-4db1-9cbe-6189f230e514" containerID="60e44c117f8086becfb0cc989de464a7485e717bcb8af285c3b199abf8f1319b" exitCode=0 Dec 05 11:50:25 crc kubenswrapper[4728]: I1205 11:50:25.808908 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-h792n" event={"ID":"21cac74f-ba27-4db1-9cbe-6189f230e514","Type":"ContainerDied","Data":"60e44c117f8086becfb0cc989de464a7485e717bcb8af285c3b199abf8f1319b"} Dec 05 11:50:27 crc kubenswrapper[4728]: I1205 11:50:27.265382 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-h792n" Dec 05 11:50:27 crc kubenswrapper[4728]: I1205 11:50:27.345676 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vgvw9\" (UniqueName: \"kubernetes.io/projected/21cac74f-ba27-4db1-9cbe-6189f230e514-kube-api-access-vgvw9\") pod \"21cac74f-ba27-4db1-9cbe-6189f230e514\" (UID: \"21cac74f-ba27-4db1-9cbe-6189f230e514\") " Dec 05 11:50:27 crc kubenswrapper[4728]: I1205 11:50:27.345777 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21cac74f-ba27-4db1-9cbe-6189f230e514-libvirt-combined-ca-bundle\") pod \"21cac74f-ba27-4db1-9cbe-6189f230e514\" (UID: \"21cac74f-ba27-4db1-9cbe-6189f230e514\") " Dec 05 11:50:27 crc kubenswrapper[4728]: I1205 11:50:27.345822 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/21cac74f-ba27-4db1-9cbe-6189f230e514-ssh-key\") pod \"21cac74f-ba27-4db1-9cbe-6189f230e514\" (UID: \"21cac74f-ba27-4db1-9cbe-6189f230e514\") " Dec 05 11:50:27 crc kubenswrapper[4728]: I1205 11:50:27.345856 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/21cac74f-ba27-4db1-9cbe-6189f230e514-libvirt-secret-0\") pod \"21cac74f-ba27-4db1-9cbe-6189f230e514\" (UID: \"21cac74f-ba27-4db1-9cbe-6189f230e514\") " Dec 05 11:50:27 crc kubenswrapper[4728]: I1205 11:50:27.345874 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/21cac74f-ba27-4db1-9cbe-6189f230e514-inventory\") pod \"21cac74f-ba27-4db1-9cbe-6189f230e514\" (UID: \"21cac74f-ba27-4db1-9cbe-6189f230e514\") " Dec 05 11:50:27 crc kubenswrapper[4728]: I1205 11:50:27.352886 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21cac74f-ba27-4db1-9cbe-6189f230e514-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "21cac74f-ba27-4db1-9cbe-6189f230e514" (UID: "21cac74f-ba27-4db1-9cbe-6189f230e514"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:50:27 crc kubenswrapper[4728]: I1205 11:50:27.355001 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/21cac74f-ba27-4db1-9cbe-6189f230e514-kube-api-access-vgvw9" (OuterVolumeSpecName: "kube-api-access-vgvw9") pod "21cac74f-ba27-4db1-9cbe-6189f230e514" (UID: "21cac74f-ba27-4db1-9cbe-6189f230e514"). InnerVolumeSpecName "kube-api-access-vgvw9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:50:27 crc kubenswrapper[4728]: I1205 11:50:27.374896 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21cac74f-ba27-4db1-9cbe-6189f230e514-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "21cac74f-ba27-4db1-9cbe-6189f230e514" (UID: "21cac74f-ba27-4db1-9cbe-6189f230e514"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:50:27 crc kubenswrapper[4728]: I1205 11:50:27.376032 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21cac74f-ba27-4db1-9cbe-6189f230e514-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "21cac74f-ba27-4db1-9cbe-6189f230e514" (UID: "21cac74f-ba27-4db1-9cbe-6189f230e514"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:50:27 crc kubenswrapper[4728]: I1205 11:50:27.377542 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21cac74f-ba27-4db1-9cbe-6189f230e514-inventory" (OuterVolumeSpecName: "inventory") pod "21cac74f-ba27-4db1-9cbe-6189f230e514" (UID: "21cac74f-ba27-4db1-9cbe-6189f230e514"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:50:27 crc kubenswrapper[4728]: I1205 11:50:27.449933 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vgvw9\" (UniqueName: \"kubernetes.io/projected/21cac74f-ba27-4db1-9cbe-6189f230e514-kube-api-access-vgvw9\") on node \"crc\" DevicePath \"\"" Dec 05 11:50:27 crc kubenswrapper[4728]: I1205 11:50:27.449986 4728 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21cac74f-ba27-4db1-9cbe-6189f230e514-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:50:27 crc kubenswrapper[4728]: I1205 11:50:27.450045 4728 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/21cac74f-ba27-4db1-9cbe-6189f230e514-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 11:50:27 crc kubenswrapper[4728]: I1205 11:50:27.450066 4728 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/21cac74f-ba27-4db1-9cbe-6189f230e514-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Dec 05 11:50:27 crc kubenswrapper[4728]: I1205 11:50:27.450141 4728 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/21cac74f-ba27-4db1-9cbe-6189f230e514-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 11:50:27 crc kubenswrapper[4728]: I1205 11:50:27.830364 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-h792n" event={"ID":"21cac74f-ba27-4db1-9cbe-6189f230e514","Type":"ContainerDied","Data":"f5688d72899dc206f7f6cfc7f4d81cf4fcc11cac12486520e1a0e86751cf9114"} Dec 05 11:50:27 crc kubenswrapper[4728]: I1205 11:50:27.830398 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f5688d72899dc206f7f6cfc7f4d81cf4fcc11cac12486520e1a0e86751cf9114" Dec 05 11:50:27 crc kubenswrapper[4728]: I1205 11:50:27.830490 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-h792n" Dec 05 11:50:27 crc kubenswrapper[4728]: I1205 11:50:27.939406 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-bw26p"] Dec 05 11:50:27 crc kubenswrapper[4728]: E1205 11:50:27.939784 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5201134-74f2-4ad3-af57-e4fe0001fc4f" containerName="extract-utilities" Dec 05 11:50:27 crc kubenswrapper[4728]: I1205 11:50:27.939819 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5201134-74f2-4ad3-af57-e4fe0001fc4f" containerName="extract-utilities" Dec 05 11:50:27 crc kubenswrapper[4728]: E1205 11:50:27.939837 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12a742d2-5e93-4495-a5b6-9e95f2f5ae5a" containerName="extract-content" Dec 05 11:50:27 crc kubenswrapper[4728]: I1205 11:50:27.939843 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="12a742d2-5e93-4495-a5b6-9e95f2f5ae5a" containerName="extract-content" Dec 05 11:50:27 crc kubenswrapper[4728]: E1205 11:50:27.939851 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12a742d2-5e93-4495-a5b6-9e95f2f5ae5a" containerName="extract-utilities" Dec 05 11:50:27 crc kubenswrapper[4728]: I1205 11:50:27.939856 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="12a742d2-5e93-4495-a5b6-9e95f2f5ae5a" containerName="extract-utilities" Dec 05 11:50:27 crc kubenswrapper[4728]: E1205 11:50:27.939868 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12a742d2-5e93-4495-a5b6-9e95f2f5ae5a" containerName="registry-server" Dec 05 11:50:27 crc kubenswrapper[4728]: I1205 11:50:27.939875 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="12a742d2-5e93-4495-a5b6-9e95f2f5ae5a" containerName="registry-server" Dec 05 11:50:27 crc kubenswrapper[4728]: E1205 11:50:27.939884 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8fc4df4d-594c-40ff-8dcf-231c808baa24" containerName="extract-content" Dec 05 11:50:27 crc kubenswrapper[4728]: I1205 11:50:27.939890 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="8fc4df4d-594c-40ff-8dcf-231c808baa24" containerName="extract-content" Dec 05 11:50:27 crc kubenswrapper[4728]: E1205 11:50:27.939906 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8fc4df4d-594c-40ff-8dcf-231c808baa24" containerName="extract-utilities" Dec 05 11:50:27 crc kubenswrapper[4728]: I1205 11:50:27.939912 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="8fc4df4d-594c-40ff-8dcf-231c808baa24" containerName="extract-utilities" Dec 05 11:50:27 crc kubenswrapper[4728]: E1205 11:50:27.939928 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21cac74f-ba27-4db1-9cbe-6189f230e514" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 05 11:50:27 crc kubenswrapper[4728]: I1205 11:50:27.939934 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="21cac74f-ba27-4db1-9cbe-6189f230e514" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 05 11:50:27 crc kubenswrapper[4728]: E1205 11:50:27.939945 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5201134-74f2-4ad3-af57-e4fe0001fc4f" containerName="registry-server" Dec 05 11:50:27 crc kubenswrapper[4728]: I1205 11:50:27.939952 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5201134-74f2-4ad3-af57-e4fe0001fc4f" containerName="registry-server" Dec 05 11:50:27 crc kubenswrapper[4728]: 
E1205 11:50:27.939968 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8fc4df4d-594c-40ff-8dcf-231c808baa24" containerName="registry-server" Dec 05 11:50:27 crc kubenswrapper[4728]: I1205 11:50:27.939974 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="8fc4df4d-594c-40ff-8dcf-231c808baa24" containerName="registry-server" Dec 05 11:50:27 crc kubenswrapper[4728]: E1205 11:50:27.939985 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5201134-74f2-4ad3-af57-e4fe0001fc4f" containerName="extract-content" Dec 05 11:50:27 crc kubenswrapper[4728]: I1205 11:50:27.939991 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5201134-74f2-4ad3-af57-e4fe0001fc4f" containerName="extract-content" Dec 05 11:50:27 crc kubenswrapper[4728]: I1205 11:50:27.940179 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="12a742d2-5e93-4495-a5b6-9e95f2f5ae5a" containerName="registry-server" Dec 05 11:50:27 crc kubenswrapper[4728]: I1205 11:50:27.940189 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="21cac74f-ba27-4db1-9cbe-6189f230e514" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 05 11:50:27 crc kubenswrapper[4728]: I1205 11:50:27.940199 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="8fc4df4d-594c-40ff-8dcf-231c808baa24" containerName="registry-server" Dec 05 11:50:27 crc kubenswrapper[4728]: I1205 11:50:27.940218 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="b5201134-74f2-4ad3-af57-e4fe0001fc4f" containerName="registry-server" Dec 05 11:50:27 crc kubenswrapper[4728]: I1205 11:50:27.940851 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bw26p" Dec 05 11:50:27 crc kubenswrapper[4728]: I1205 11:50:27.943901 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 11:50:27 crc kubenswrapper[4728]: I1205 11:50:27.943902 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config" Dec 05 11:50:27 crc kubenswrapper[4728]: I1205 11:50:27.943902 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Dec 05 11:50:27 crc kubenswrapper[4728]: I1205 11:50:27.945518 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Dec 05 11:50:27 crc kubenswrapper[4728]: I1205 11:50:27.945522 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 11:50:27 crc kubenswrapper[4728]: I1205 11:50:27.946387 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 11:50:27 crc kubenswrapper[4728]: I1205 11:50:27.946442 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b4kzh" Dec 05 11:50:27 crc kubenswrapper[4728]: I1205 11:50:27.954094 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-bw26p"] Dec 05 11:50:28 crc kubenswrapper[4728]: I1205 11:50:28.076009 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1284e61e-761e-482e-930f-ba0e75280dd7-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bw26p\" (UID: 
\"1284e61e-761e-482e-930f-ba0e75280dd7\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bw26p" Dec 05 11:50:28 crc kubenswrapper[4728]: I1205 11:50:28.076055 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/1284e61e-761e-482e-930f-ba0e75280dd7-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bw26p\" (UID: \"1284e61e-761e-482e-930f-ba0e75280dd7\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bw26p" Dec 05 11:50:28 crc kubenswrapper[4728]: I1205 11:50:28.076089 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/1284e61e-761e-482e-930f-ba0e75280dd7-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bw26p\" (UID: \"1284e61e-761e-482e-930f-ba0e75280dd7\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bw26p" Dec 05 11:50:28 crc kubenswrapper[4728]: I1205 11:50:28.076106 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-44gnd\" (UniqueName: \"kubernetes.io/projected/1284e61e-761e-482e-930f-ba0e75280dd7-kube-api-access-44gnd\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bw26p\" (UID: \"1284e61e-761e-482e-930f-ba0e75280dd7\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bw26p" Dec 05 11:50:28 crc kubenswrapper[4728]: I1205 11:50:28.076142 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/1284e61e-761e-482e-930f-ba0e75280dd7-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bw26p\" (UID: \"1284e61e-761e-482e-930f-ba0e75280dd7\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bw26p" Dec 05 11:50:28 crc kubenswrapper[4728]: I1205 11:50:28.076166 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1284e61e-761e-482e-930f-ba0e75280dd7-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bw26p\" (UID: \"1284e61e-761e-482e-930f-ba0e75280dd7\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bw26p" Dec 05 11:50:28 crc kubenswrapper[4728]: I1205 11:50:28.076220 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/1284e61e-761e-482e-930f-ba0e75280dd7-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bw26p\" (UID: \"1284e61e-761e-482e-930f-ba0e75280dd7\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bw26p" Dec 05 11:50:28 crc kubenswrapper[4728]: I1205 11:50:28.076241 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1284e61e-761e-482e-930f-ba0e75280dd7-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bw26p\" (UID: \"1284e61e-761e-482e-930f-ba0e75280dd7\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bw26p" Dec 05 11:50:28 crc kubenswrapper[4728]: I1205 11:50:28.076356 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: 
\"kubernetes.io/configmap/1284e61e-761e-482e-930f-ba0e75280dd7-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bw26p\" (UID: \"1284e61e-761e-482e-930f-ba0e75280dd7\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bw26p" Dec 05 11:50:28 crc kubenswrapper[4728]: I1205 11:50:28.177500 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1284e61e-761e-482e-930f-ba0e75280dd7-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bw26p\" (UID: \"1284e61e-761e-482e-930f-ba0e75280dd7\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bw26p" Dec 05 11:50:28 crc kubenswrapper[4728]: I1205 11:50:28.177540 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/1284e61e-761e-482e-930f-ba0e75280dd7-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bw26p\" (UID: \"1284e61e-761e-482e-930f-ba0e75280dd7\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bw26p" Dec 05 11:50:28 crc kubenswrapper[4728]: I1205 11:50:28.177571 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/1284e61e-761e-482e-930f-ba0e75280dd7-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bw26p\" (UID: \"1284e61e-761e-482e-930f-ba0e75280dd7\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bw26p" Dec 05 11:50:28 crc kubenswrapper[4728]: I1205 11:50:28.177588 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-44gnd\" (UniqueName: \"kubernetes.io/projected/1284e61e-761e-482e-930f-ba0e75280dd7-kube-api-access-44gnd\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bw26p\" (UID: \"1284e61e-761e-482e-930f-ba0e75280dd7\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bw26p" Dec 05 11:50:28 crc kubenswrapper[4728]: I1205 11:50:28.177620 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/1284e61e-761e-482e-930f-ba0e75280dd7-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bw26p\" (UID: \"1284e61e-761e-482e-930f-ba0e75280dd7\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bw26p" Dec 05 11:50:28 crc kubenswrapper[4728]: I1205 11:50:28.177641 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1284e61e-761e-482e-930f-ba0e75280dd7-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bw26p\" (UID: \"1284e61e-761e-482e-930f-ba0e75280dd7\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bw26p" Dec 05 11:50:28 crc kubenswrapper[4728]: I1205 11:50:28.177698 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/1284e61e-761e-482e-930f-ba0e75280dd7-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bw26p\" (UID: \"1284e61e-761e-482e-930f-ba0e75280dd7\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bw26p" Dec 05 11:50:28 crc kubenswrapper[4728]: I1205 11:50:28.177715 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/1284e61e-761e-482e-930f-ba0e75280dd7-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bw26p\" (UID: \"1284e61e-761e-482e-930f-ba0e75280dd7\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bw26p" Dec 05 11:50:28 crc kubenswrapper[4728]: I1205 11:50:28.177755 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/1284e61e-761e-482e-930f-ba0e75280dd7-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bw26p\" (UID: \"1284e61e-761e-482e-930f-ba0e75280dd7\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bw26p" Dec 05 11:50:28 crc kubenswrapper[4728]: I1205 11:50:28.178829 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/1284e61e-761e-482e-930f-ba0e75280dd7-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bw26p\" (UID: \"1284e61e-761e-482e-930f-ba0e75280dd7\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bw26p" Dec 05 11:50:28 crc kubenswrapper[4728]: I1205 11:50:28.181845 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1284e61e-761e-482e-930f-ba0e75280dd7-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bw26p\" (UID: \"1284e61e-761e-482e-930f-ba0e75280dd7\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bw26p" Dec 05 11:50:28 crc kubenswrapper[4728]: I1205 11:50:28.182028 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/1284e61e-761e-482e-930f-ba0e75280dd7-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bw26p\" (UID: \"1284e61e-761e-482e-930f-ba0e75280dd7\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bw26p" Dec 05 11:50:28 crc kubenswrapper[4728]: I1205 11:50:28.182044 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/1284e61e-761e-482e-930f-ba0e75280dd7-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bw26p\" (UID: \"1284e61e-761e-482e-930f-ba0e75280dd7\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bw26p" Dec 05 11:50:28 crc kubenswrapper[4728]: I1205 11:50:28.182680 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1284e61e-761e-482e-930f-ba0e75280dd7-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bw26p\" (UID: \"1284e61e-761e-482e-930f-ba0e75280dd7\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bw26p" Dec 05 11:50:28 crc kubenswrapper[4728]: I1205 11:50:28.183345 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1284e61e-761e-482e-930f-ba0e75280dd7-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bw26p\" (UID: \"1284e61e-761e-482e-930f-ba0e75280dd7\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bw26p" Dec 05 11:50:28 crc kubenswrapper[4728]: I1205 11:50:28.183964 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/1284e61e-761e-482e-930f-ba0e75280dd7-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bw26p\" (UID: 
\"1284e61e-761e-482e-930f-ba0e75280dd7\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bw26p" Dec 05 11:50:28 crc kubenswrapper[4728]: I1205 11:50:28.184604 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/1284e61e-761e-482e-930f-ba0e75280dd7-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bw26p\" (UID: \"1284e61e-761e-482e-930f-ba0e75280dd7\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bw26p" Dec 05 11:50:28 crc kubenswrapper[4728]: I1205 11:50:28.198415 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-44gnd\" (UniqueName: \"kubernetes.io/projected/1284e61e-761e-482e-930f-ba0e75280dd7-kube-api-access-44gnd\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bw26p\" (UID: \"1284e61e-761e-482e-930f-ba0e75280dd7\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bw26p" Dec 05 11:50:28 crc kubenswrapper[4728]: I1205 11:50:28.263235 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bw26p" Dec 05 11:50:28 crc kubenswrapper[4728]: I1205 11:50:28.827760 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-bw26p"] Dec 05 11:50:28 crc kubenswrapper[4728]: I1205 11:50:28.841218 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bw26p" event={"ID":"1284e61e-761e-482e-930f-ba0e75280dd7","Type":"ContainerStarted","Data":"b9f21f2d6d905f1f19395de383904cadccc92c4a3fd3f1cf64cbf0082de40a65"} Dec 05 11:50:29 crc kubenswrapper[4728]: I1205 11:50:29.853666 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bw26p" event={"ID":"1284e61e-761e-482e-930f-ba0e75280dd7","Type":"ContainerStarted","Data":"d561bcebc779b90253ebda4234980c809411b362a55e6ee1181b8d71ec9a84a4"} Dec 05 11:50:29 crc kubenswrapper[4728]: I1205 11:50:29.880529 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bw26p" podStartSLOduration=2.251759011 podStartE2EDuration="2.880509063s" podCreationTimestamp="2025-12-05 11:50:27 +0000 UTC" firstStartedPulling="2025-12-05 11:50:28.824393331 +0000 UTC m=+2562.966516024" lastFinishedPulling="2025-12-05 11:50:29.453143373 +0000 UTC m=+2563.595266076" observedRunningTime="2025-12-05 11:50:29.872522889 +0000 UTC m=+2564.014645592" watchObservedRunningTime="2025-12-05 11:50:29.880509063 +0000 UTC m=+2564.022631766" Dec 05 11:50:37 crc kubenswrapper[4728]: I1205 11:50:37.352862 4728 scope.go:117] "RemoveContainer" containerID="2d07033b290f931a3661668c97d1daf60b1b0020e8132f72469f3cf338c1768e" Dec 05 11:50:37 crc kubenswrapper[4728]: E1205 11:50:37.353824 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 11:50:51 crc kubenswrapper[4728]: I1205 11:50:51.352665 4728 scope.go:117] "RemoveContainer" containerID="2d07033b290f931a3661668c97d1daf60b1b0020e8132f72469f3cf338c1768e" Dec 05 11:50:51 crc 
kubenswrapper[4728]: E1205 11:50:51.353357 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 11:51:06 crc kubenswrapper[4728]: I1205 11:51:06.361097 4728 scope.go:117] "RemoveContainer" containerID="2d07033b290f931a3661668c97d1daf60b1b0020e8132f72469f3cf338c1768e" Dec 05 11:51:06 crc kubenswrapper[4728]: E1205 11:51:06.362058 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 11:51:20 crc kubenswrapper[4728]: I1205 11:51:20.352916 4728 scope.go:117] "RemoveContainer" containerID="2d07033b290f931a3661668c97d1daf60b1b0020e8132f72469f3cf338c1768e" Dec 05 11:51:20 crc kubenswrapper[4728]: E1205 11:51:20.353743 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 11:51:31 crc kubenswrapper[4728]: I1205 11:51:31.352219 4728 scope.go:117] "RemoveContainer" containerID="2d07033b290f931a3661668c97d1daf60b1b0020e8132f72469f3cf338c1768e" Dec 05 11:51:31 crc kubenswrapper[4728]: E1205 11:51:31.353400 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 11:51:43 crc kubenswrapper[4728]: I1205 11:51:43.353098 4728 scope.go:117] "RemoveContainer" containerID="2d07033b290f931a3661668c97d1daf60b1b0020e8132f72469f3cf338c1768e" Dec 05 11:51:43 crc kubenswrapper[4728]: E1205 11:51:43.354479 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 11:51:57 crc kubenswrapper[4728]: I1205 11:51:57.352367 4728 scope.go:117] "RemoveContainer" containerID="2d07033b290f931a3661668c97d1daf60b1b0020e8132f72469f3cf338c1768e" Dec 05 11:51:57 crc kubenswrapper[4728]: E1205 11:51:57.353855 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 11:52:10 crc kubenswrapper[4728]: I1205 11:52:10.352281 4728 scope.go:117] "RemoveContainer" containerID="2d07033b290f931a3661668c97d1daf60b1b0020e8132f72469f3cf338c1768e" Dec 05 11:52:10 crc kubenswrapper[4728]: E1205 11:52:10.353160 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 11:52:22 crc kubenswrapper[4728]: I1205 11:52:22.352624 4728 scope.go:117] "RemoveContainer" containerID="2d07033b290f931a3661668c97d1daf60b1b0020e8132f72469f3cf338c1768e" Dec 05 11:52:22 crc kubenswrapper[4728]: E1205 11:52:22.353367 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 11:52:25 crc kubenswrapper[4728]: I1205 11:52:25.687340 4728 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-7xxhg container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.38:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 05 11:52:25 crc kubenswrapper[4728]: I1205 11:52:25.687837 4728 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7xxhg" podUID="977887c0-1f95-4b49-ac6e-34d90aa8d305" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.38:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 05 11:52:25 crc kubenswrapper[4728]: I1205 11:52:25.687400 4728 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-7xxhg container/olm-operator namespace/openshift-operator-lifecycle-manager: Liveness probe status=failure output="Get \"https://10.217.0.38:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 05 11:52:25 crc kubenswrapper[4728]: I1205 11:52:25.688012 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7xxhg" podUID="977887c0-1f95-4b49-ac6e-34d90aa8d305" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.38:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 05 11:52:37 crc kubenswrapper[4728]: I1205 11:52:37.352754 4728 scope.go:117] "RemoveContainer" containerID="2d07033b290f931a3661668c97d1daf60b1b0020e8132f72469f3cf338c1768e" Dec 05 11:52:37 crc kubenswrapper[4728]: 
I1205 11:52:37.601740 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" event={"ID":"95bfa60b-fcb6-4519-abc5-c25fea50921d","Type":"ContainerStarted","Data":"e948f80ca1fe2f0aa74620908aeae037dd5cfb64e02974b258183cfdc09f050d"} Dec 05 11:53:33 crc kubenswrapper[4728]: I1205 11:53:33.137366 4728 generic.go:334] "Generic (PLEG): container finished" podID="1284e61e-761e-482e-930f-ba0e75280dd7" containerID="d561bcebc779b90253ebda4234980c809411b362a55e6ee1181b8d71ec9a84a4" exitCode=0 Dec 05 11:53:33 crc kubenswrapper[4728]: I1205 11:53:33.137457 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bw26p" event={"ID":"1284e61e-761e-482e-930f-ba0e75280dd7","Type":"ContainerDied","Data":"d561bcebc779b90253ebda4234980c809411b362a55e6ee1181b8d71ec9a84a4"} Dec 05 11:53:34 crc kubenswrapper[4728]: I1205 11:53:34.610885 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bw26p" Dec 05 11:53:34 crc kubenswrapper[4728]: I1205 11:53:34.732993 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-44gnd\" (UniqueName: \"kubernetes.io/projected/1284e61e-761e-482e-930f-ba0e75280dd7-kube-api-access-44gnd\") pod \"1284e61e-761e-482e-930f-ba0e75280dd7\" (UID: \"1284e61e-761e-482e-930f-ba0e75280dd7\") " Dec 05 11:53:34 crc kubenswrapper[4728]: I1205 11:53:34.733061 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/1284e61e-761e-482e-930f-ba0e75280dd7-nova-cell1-compute-config-0\") pod \"1284e61e-761e-482e-930f-ba0e75280dd7\" (UID: \"1284e61e-761e-482e-930f-ba0e75280dd7\") " Dec 05 11:53:34 crc kubenswrapper[4728]: I1205 11:53:34.733097 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/1284e61e-761e-482e-930f-ba0e75280dd7-nova-migration-ssh-key-0\") pod \"1284e61e-761e-482e-930f-ba0e75280dd7\" (UID: \"1284e61e-761e-482e-930f-ba0e75280dd7\") " Dec 05 11:53:34 crc kubenswrapper[4728]: I1205 11:53:34.733129 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1284e61e-761e-482e-930f-ba0e75280dd7-inventory\") pod \"1284e61e-761e-482e-930f-ba0e75280dd7\" (UID: \"1284e61e-761e-482e-930f-ba0e75280dd7\") " Dec 05 11:53:34 crc kubenswrapper[4728]: I1205 11:53:34.733161 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1284e61e-761e-482e-930f-ba0e75280dd7-ssh-key\") pod \"1284e61e-761e-482e-930f-ba0e75280dd7\" (UID: \"1284e61e-761e-482e-930f-ba0e75280dd7\") " Dec 05 11:53:34 crc kubenswrapper[4728]: I1205 11:53:34.733193 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/1284e61e-761e-482e-930f-ba0e75280dd7-nova-migration-ssh-key-1\") pod \"1284e61e-761e-482e-930f-ba0e75280dd7\" (UID: \"1284e61e-761e-482e-930f-ba0e75280dd7\") " Dec 05 11:53:34 crc kubenswrapper[4728]: I1205 11:53:34.733353 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1284e61e-761e-482e-930f-ba0e75280dd7-nova-combined-ca-bundle\") pod 
\"1284e61e-761e-482e-930f-ba0e75280dd7\" (UID: \"1284e61e-761e-482e-930f-ba0e75280dd7\") " Dec 05 11:53:34 crc kubenswrapper[4728]: I1205 11:53:34.733402 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/1284e61e-761e-482e-930f-ba0e75280dd7-nova-extra-config-0\") pod \"1284e61e-761e-482e-930f-ba0e75280dd7\" (UID: \"1284e61e-761e-482e-930f-ba0e75280dd7\") " Dec 05 11:53:34 crc kubenswrapper[4728]: I1205 11:53:34.733451 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/1284e61e-761e-482e-930f-ba0e75280dd7-nova-cell1-compute-config-1\") pod \"1284e61e-761e-482e-930f-ba0e75280dd7\" (UID: \"1284e61e-761e-482e-930f-ba0e75280dd7\") " Dec 05 11:53:34 crc kubenswrapper[4728]: I1205 11:53:34.745566 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1284e61e-761e-482e-930f-ba0e75280dd7-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "1284e61e-761e-482e-930f-ba0e75280dd7" (UID: "1284e61e-761e-482e-930f-ba0e75280dd7"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:53:34 crc kubenswrapper[4728]: I1205 11:53:34.754575 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1284e61e-761e-482e-930f-ba0e75280dd7-kube-api-access-44gnd" (OuterVolumeSpecName: "kube-api-access-44gnd") pod "1284e61e-761e-482e-930f-ba0e75280dd7" (UID: "1284e61e-761e-482e-930f-ba0e75280dd7"). InnerVolumeSpecName "kube-api-access-44gnd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:53:34 crc kubenswrapper[4728]: I1205 11:53:34.763137 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1284e61e-761e-482e-930f-ba0e75280dd7-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "1284e61e-761e-482e-930f-ba0e75280dd7" (UID: "1284e61e-761e-482e-930f-ba0e75280dd7"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:53:34 crc kubenswrapper[4728]: I1205 11:53:34.763690 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1284e61e-761e-482e-930f-ba0e75280dd7-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "1284e61e-761e-482e-930f-ba0e75280dd7" (UID: "1284e61e-761e-482e-930f-ba0e75280dd7"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:53:34 crc kubenswrapper[4728]: I1205 11:53:34.763963 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1284e61e-761e-482e-930f-ba0e75280dd7-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "1284e61e-761e-482e-930f-ba0e75280dd7" (UID: "1284e61e-761e-482e-930f-ba0e75280dd7"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:53:34 crc kubenswrapper[4728]: E1205 11:53:34.777781 4728 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1284e61e-761e-482e-930f-ba0e75280dd7-inventory podName:1284e61e-761e-482e-930f-ba0e75280dd7 nodeName:}" failed. No retries permitted until 2025-12-05 11:53:35.27775303 +0000 UTC m=+2749.419875773 (durationBeforeRetry 500ms). 
Error: error cleaning subPath mounts for volume "inventory" (UniqueName: "kubernetes.io/secret/1284e61e-761e-482e-930f-ba0e75280dd7-inventory") pod "1284e61e-761e-482e-930f-ba0e75280dd7" (UID: "1284e61e-761e-482e-930f-ba0e75280dd7") : error deleting /var/lib/kubelet/pods/1284e61e-761e-482e-930f-ba0e75280dd7/volume-subpaths: remove /var/lib/kubelet/pods/1284e61e-761e-482e-930f-ba0e75280dd7/volume-subpaths: no such file or directory Dec 05 11:53:34 crc kubenswrapper[4728]: I1205 11:53:34.777876 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1284e61e-761e-482e-930f-ba0e75280dd7-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "1284e61e-761e-482e-930f-ba0e75280dd7" (UID: "1284e61e-761e-482e-930f-ba0e75280dd7"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:53:34 crc kubenswrapper[4728]: I1205 11:53:34.780422 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1284e61e-761e-482e-930f-ba0e75280dd7-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "1284e61e-761e-482e-930f-ba0e75280dd7" (UID: "1284e61e-761e-482e-930f-ba0e75280dd7"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:53:34 crc kubenswrapper[4728]: I1205 11:53:34.785282 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1284e61e-761e-482e-930f-ba0e75280dd7-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "1284e61e-761e-482e-930f-ba0e75280dd7" (UID: "1284e61e-761e-482e-930f-ba0e75280dd7"). InnerVolumeSpecName "nova-cell1-compute-config-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:53:34 crc kubenswrapper[4728]: I1205 11:53:34.835361 4728 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1284e61e-761e-482e-930f-ba0e75280dd7-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:53:34 crc kubenswrapper[4728]: I1205 11:53:34.835391 4728 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/1284e61e-761e-482e-930f-ba0e75280dd7-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Dec 05 11:53:34 crc kubenswrapper[4728]: I1205 11:53:34.835408 4728 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/1284e61e-761e-482e-930f-ba0e75280dd7-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Dec 05 11:53:34 crc kubenswrapper[4728]: I1205 11:53:34.835423 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-44gnd\" (UniqueName: \"kubernetes.io/projected/1284e61e-761e-482e-930f-ba0e75280dd7-kube-api-access-44gnd\") on node \"crc\" DevicePath \"\"" Dec 05 11:53:34 crc kubenswrapper[4728]: I1205 11:53:34.835435 4728 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/1284e61e-761e-482e-930f-ba0e75280dd7-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Dec 05 11:53:34 crc kubenswrapper[4728]: I1205 11:53:34.835447 4728 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/1284e61e-761e-482e-930f-ba0e75280dd7-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Dec 05 11:53:34 crc kubenswrapper[4728]: I1205 11:53:34.835458 4728 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1284e61e-761e-482e-930f-ba0e75280dd7-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 11:53:34 crc kubenswrapper[4728]: I1205 11:53:34.835469 4728 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/1284e61e-761e-482e-930f-ba0e75280dd7-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Dec 05 11:53:35 crc kubenswrapper[4728]: I1205 11:53:35.160087 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bw26p" event={"ID":"1284e61e-761e-482e-930f-ba0e75280dd7","Type":"ContainerDied","Data":"b9f21f2d6d905f1f19395de383904cadccc92c4a3fd3f1cf64cbf0082de40a65"} Dec 05 11:53:35 crc kubenswrapper[4728]: I1205 11:53:35.160596 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b9f21f2d6d905f1f19395de383904cadccc92c4a3fd3f1cf64cbf0082de40a65" Dec 05 11:53:35 crc kubenswrapper[4728]: I1205 11:53:35.160128 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bw26p" Dec 05 11:53:35 crc kubenswrapper[4728]: I1205 11:53:35.265394 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mb8fg"] Dec 05 11:53:35 crc kubenswrapper[4728]: E1205 11:53:35.265838 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1284e61e-761e-482e-930f-ba0e75280dd7" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 05 11:53:35 crc kubenswrapper[4728]: I1205 11:53:35.265859 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="1284e61e-761e-482e-930f-ba0e75280dd7" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 05 11:53:35 crc kubenswrapper[4728]: I1205 11:53:35.266422 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="1284e61e-761e-482e-930f-ba0e75280dd7" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 05 11:53:35 crc kubenswrapper[4728]: I1205 11:53:35.268500 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mb8fg" Dec 05 11:53:35 crc kubenswrapper[4728]: I1205 11:53:35.270139 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Dec 05 11:53:35 crc kubenswrapper[4728]: I1205 11:53:35.282711 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mb8fg"] Dec 05 11:53:35 crc kubenswrapper[4728]: I1205 11:53:35.344303 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1284e61e-761e-482e-930f-ba0e75280dd7-inventory\") pod \"1284e61e-761e-482e-930f-ba0e75280dd7\" (UID: \"1284e61e-761e-482e-930f-ba0e75280dd7\") " Dec 05 11:53:35 crc kubenswrapper[4728]: I1205 11:53:35.344706 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/08a82141-a6d5-4c68-9adb-9c4158a6c7c2-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mb8fg\" (UID: \"08a82141-a6d5-4c68-9adb-9c4158a6c7c2\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mb8fg" Dec 05 11:53:35 crc kubenswrapper[4728]: I1205 11:53:35.344737 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/08a82141-a6d5-4c68-9adb-9c4158a6c7c2-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mb8fg\" (UID: \"08a82141-a6d5-4c68-9adb-9c4158a6c7c2\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mb8fg" Dec 05 11:53:35 crc kubenswrapper[4728]: I1205 11:53:35.344769 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/08a82141-a6d5-4c68-9adb-9c4158a6c7c2-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mb8fg\" (UID: \"08a82141-a6d5-4c68-9adb-9c4158a6c7c2\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mb8fg" Dec 05 11:53:35 crc kubenswrapper[4728]: I1205 11:53:35.345099 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qrhvs\" (UniqueName: \"kubernetes.io/projected/08a82141-a6d5-4c68-9adb-9c4158a6c7c2-kube-api-access-qrhvs\") pod 
\"telemetry-edpm-deployment-openstack-edpm-ipam-mb8fg\" (UID: \"08a82141-a6d5-4c68-9adb-9c4158a6c7c2\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mb8fg" Dec 05 11:53:35 crc kubenswrapper[4728]: I1205 11:53:35.345222 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/08a82141-a6d5-4c68-9adb-9c4158a6c7c2-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mb8fg\" (UID: \"08a82141-a6d5-4c68-9adb-9c4158a6c7c2\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mb8fg" Dec 05 11:53:35 crc kubenswrapper[4728]: I1205 11:53:35.345304 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08a82141-a6d5-4c68-9adb-9c4158a6c7c2-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mb8fg\" (UID: \"08a82141-a6d5-4c68-9adb-9c4158a6c7c2\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mb8fg" Dec 05 11:53:35 crc kubenswrapper[4728]: I1205 11:53:35.345435 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/08a82141-a6d5-4c68-9adb-9c4158a6c7c2-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mb8fg\" (UID: \"08a82141-a6d5-4c68-9adb-9c4158a6c7c2\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mb8fg" Dec 05 11:53:35 crc kubenswrapper[4728]: I1205 11:53:35.354018 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1284e61e-761e-482e-930f-ba0e75280dd7-inventory" (OuterVolumeSpecName: "inventory") pod "1284e61e-761e-482e-930f-ba0e75280dd7" (UID: "1284e61e-761e-482e-930f-ba0e75280dd7"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:53:35 crc kubenswrapper[4728]: I1205 11:53:35.446768 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/08a82141-a6d5-4c68-9adb-9c4158a6c7c2-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mb8fg\" (UID: \"08a82141-a6d5-4c68-9adb-9c4158a6c7c2\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mb8fg" Dec 05 11:53:35 crc kubenswrapper[4728]: I1205 11:53:35.447108 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08a82141-a6d5-4c68-9adb-9c4158a6c7c2-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mb8fg\" (UID: \"08a82141-a6d5-4c68-9adb-9c4158a6c7c2\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mb8fg" Dec 05 11:53:35 crc kubenswrapper[4728]: I1205 11:53:35.447297 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/08a82141-a6d5-4c68-9adb-9c4158a6c7c2-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mb8fg\" (UID: \"08a82141-a6d5-4c68-9adb-9c4158a6c7c2\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mb8fg" Dec 05 11:53:35 crc kubenswrapper[4728]: I1205 11:53:35.447470 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/08a82141-a6d5-4c68-9adb-9c4158a6c7c2-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mb8fg\" (UID: \"08a82141-a6d5-4c68-9adb-9c4158a6c7c2\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mb8fg" Dec 05 11:53:35 crc kubenswrapper[4728]: I1205 11:53:35.447608 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/08a82141-a6d5-4c68-9adb-9c4158a6c7c2-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mb8fg\" (UID: \"08a82141-a6d5-4c68-9adb-9c4158a6c7c2\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mb8fg" Dec 05 11:53:35 crc kubenswrapper[4728]: I1205 11:53:35.447753 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/08a82141-a6d5-4c68-9adb-9c4158a6c7c2-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mb8fg\" (UID: \"08a82141-a6d5-4c68-9adb-9c4158a6c7c2\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mb8fg" Dec 05 11:53:35 crc kubenswrapper[4728]: I1205 11:53:35.447962 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qrhvs\" (UniqueName: \"kubernetes.io/projected/08a82141-a6d5-4c68-9adb-9c4158a6c7c2-kube-api-access-qrhvs\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mb8fg\" (UID: \"08a82141-a6d5-4c68-9adb-9c4158a6c7c2\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mb8fg" Dec 05 11:53:35 crc kubenswrapper[4728]: I1205 11:53:35.448124 4728 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1284e61e-761e-482e-930f-ba0e75280dd7-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 11:53:35 crc kubenswrapper[4728]: I1205 11:53:35.450642 4728 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/08a82141-a6d5-4c68-9adb-9c4158a6c7c2-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mb8fg\" (UID: \"08a82141-a6d5-4c68-9adb-9c4158a6c7c2\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mb8fg" Dec 05 11:53:35 crc kubenswrapper[4728]: I1205 11:53:35.450772 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/08a82141-a6d5-4c68-9adb-9c4158a6c7c2-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mb8fg\" (UID: \"08a82141-a6d5-4c68-9adb-9c4158a6c7c2\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mb8fg" Dec 05 11:53:35 crc kubenswrapper[4728]: I1205 11:53:35.451286 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/08a82141-a6d5-4c68-9adb-9c4158a6c7c2-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mb8fg\" (UID: \"08a82141-a6d5-4c68-9adb-9c4158a6c7c2\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mb8fg" Dec 05 11:53:35 crc kubenswrapper[4728]: I1205 11:53:35.451442 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08a82141-a6d5-4c68-9adb-9c4158a6c7c2-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mb8fg\" (UID: \"08a82141-a6d5-4c68-9adb-9c4158a6c7c2\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mb8fg" Dec 05 11:53:35 crc kubenswrapper[4728]: I1205 11:53:35.451499 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/08a82141-a6d5-4c68-9adb-9c4158a6c7c2-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mb8fg\" (UID: \"08a82141-a6d5-4c68-9adb-9c4158a6c7c2\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mb8fg" Dec 05 11:53:35 crc kubenswrapper[4728]: I1205 11:53:35.451556 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/08a82141-a6d5-4c68-9adb-9c4158a6c7c2-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mb8fg\" (UID: \"08a82141-a6d5-4c68-9adb-9c4158a6c7c2\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mb8fg" Dec 05 11:53:35 crc kubenswrapper[4728]: I1205 11:53:35.465761 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qrhvs\" (UniqueName: \"kubernetes.io/projected/08a82141-a6d5-4c68-9adb-9c4158a6c7c2-kube-api-access-qrhvs\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mb8fg\" (UID: \"08a82141-a6d5-4c68-9adb-9c4158a6c7c2\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mb8fg" Dec 05 11:53:35 crc kubenswrapper[4728]: I1205 11:53:35.608099 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mb8fg" Dec 05 11:53:36 crc kubenswrapper[4728]: I1205 11:53:36.215267 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mb8fg"] Dec 05 11:53:37 crc kubenswrapper[4728]: I1205 11:53:37.183723 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mb8fg" event={"ID":"08a82141-a6d5-4c68-9adb-9c4158a6c7c2","Type":"ContainerStarted","Data":"de4e5f09dcb6c84bae2f35ff930f8522f9e25feb40ec8e8aafa1083e16d266db"} Dec 05 11:53:37 crc kubenswrapper[4728]: I1205 11:53:37.184313 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mb8fg" event={"ID":"08a82141-a6d5-4c68-9adb-9c4158a6c7c2","Type":"ContainerStarted","Data":"9a4302fd0cf665e9263fc467123e4e82be61539a84f535ede44c4457a421d8c5"} Dec 05 11:53:37 crc kubenswrapper[4728]: I1205 11:53:37.202136 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mb8fg" podStartSLOduration=1.4904528479999999 podStartE2EDuration="2.202116807s" podCreationTimestamp="2025-12-05 11:53:35 +0000 UTC" firstStartedPulling="2025-12-05 11:53:36.21859038 +0000 UTC m=+2750.360713083" lastFinishedPulling="2025-12-05 11:53:36.930254349 +0000 UTC m=+2751.072377042" observedRunningTime="2025-12-05 11:53:37.198476619 +0000 UTC m=+2751.340599312" watchObservedRunningTime="2025-12-05 11:53:37.202116807 +0000 UTC m=+2751.344239520" Dec 05 11:54:07 crc kubenswrapper[4728]: I1205 11:54:07.372549 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-2s4qc"] Dec 05 11:54:07 crc kubenswrapper[4728]: I1205 11:54:07.377674 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-2s4qc" Dec 05 11:54:07 crc kubenswrapper[4728]: I1205 11:54:07.386871 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2s4qc"] Dec 05 11:54:07 crc kubenswrapper[4728]: I1205 11:54:07.469992 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cjx9w\" (UniqueName: \"kubernetes.io/projected/e3f237bc-2866-49bc-9fe9-ad63e416d593-kube-api-access-cjx9w\") pod \"redhat-operators-2s4qc\" (UID: \"e3f237bc-2866-49bc-9fe9-ad63e416d593\") " pod="openshift-marketplace/redhat-operators-2s4qc" Dec 05 11:54:07 crc kubenswrapper[4728]: I1205 11:54:07.470984 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e3f237bc-2866-49bc-9fe9-ad63e416d593-utilities\") pod \"redhat-operators-2s4qc\" (UID: \"e3f237bc-2866-49bc-9fe9-ad63e416d593\") " pod="openshift-marketplace/redhat-operators-2s4qc" Dec 05 11:54:07 crc kubenswrapper[4728]: I1205 11:54:07.471440 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e3f237bc-2866-49bc-9fe9-ad63e416d593-catalog-content\") pod \"redhat-operators-2s4qc\" (UID: \"e3f237bc-2866-49bc-9fe9-ad63e416d593\") " pod="openshift-marketplace/redhat-operators-2s4qc" Dec 05 11:54:07 crc kubenswrapper[4728]: I1205 11:54:07.573982 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e3f237bc-2866-49bc-9fe9-ad63e416d593-catalog-content\") pod \"redhat-operators-2s4qc\" (UID: \"e3f237bc-2866-49bc-9fe9-ad63e416d593\") " pod="openshift-marketplace/redhat-operators-2s4qc" Dec 05 11:54:07 crc kubenswrapper[4728]: I1205 11:54:07.574186 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cjx9w\" (UniqueName: \"kubernetes.io/projected/e3f237bc-2866-49bc-9fe9-ad63e416d593-kube-api-access-cjx9w\") pod \"redhat-operators-2s4qc\" (UID: \"e3f237bc-2866-49bc-9fe9-ad63e416d593\") " pod="openshift-marketplace/redhat-operators-2s4qc" Dec 05 11:54:07 crc kubenswrapper[4728]: I1205 11:54:07.574250 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e3f237bc-2866-49bc-9fe9-ad63e416d593-utilities\") pod \"redhat-operators-2s4qc\" (UID: \"e3f237bc-2866-49bc-9fe9-ad63e416d593\") " pod="openshift-marketplace/redhat-operators-2s4qc" Dec 05 11:54:07 crc kubenswrapper[4728]: I1205 11:54:07.574553 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e3f237bc-2866-49bc-9fe9-ad63e416d593-catalog-content\") pod \"redhat-operators-2s4qc\" (UID: \"e3f237bc-2866-49bc-9fe9-ad63e416d593\") " pod="openshift-marketplace/redhat-operators-2s4qc" Dec 05 11:54:07 crc kubenswrapper[4728]: I1205 11:54:07.574704 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e3f237bc-2866-49bc-9fe9-ad63e416d593-utilities\") pod \"redhat-operators-2s4qc\" (UID: \"e3f237bc-2866-49bc-9fe9-ad63e416d593\") " pod="openshift-marketplace/redhat-operators-2s4qc" Dec 05 11:54:07 crc kubenswrapper[4728]: I1205 11:54:07.594313 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-cjx9w\" (UniqueName: \"kubernetes.io/projected/e3f237bc-2866-49bc-9fe9-ad63e416d593-kube-api-access-cjx9w\") pod \"redhat-operators-2s4qc\" (UID: \"e3f237bc-2866-49bc-9fe9-ad63e416d593\") " pod="openshift-marketplace/redhat-operators-2s4qc" Dec 05 11:54:07 crc kubenswrapper[4728]: I1205 11:54:07.714294 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2s4qc" Dec 05 11:54:08 crc kubenswrapper[4728]: I1205 11:54:08.200303 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2s4qc"] Dec 05 11:54:08 crc kubenswrapper[4728]: W1205 11:54:08.202216 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode3f237bc_2866_49bc_9fe9_ad63e416d593.slice/crio-32e5cd75998dc73399c316509560a219b5061482f027b139b8ac70489ac856a4 WatchSource:0}: Error finding container 32e5cd75998dc73399c316509560a219b5061482f027b139b8ac70489ac856a4: Status 404 returned error can't find the container with id 32e5cd75998dc73399c316509560a219b5061482f027b139b8ac70489ac856a4 Dec 05 11:54:08 crc kubenswrapper[4728]: I1205 11:54:08.488454 4728 generic.go:334] "Generic (PLEG): container finished" podID="e3f237bc-2866-49bc-9fe9-ad63e416d593" containerID="f0fb944bdec12cbb9ed2ba6e912b7d79fd2ec3508c722b7053b5a43638337f9b" exitCode=0 Dec 05 11:54:08 crc kubenswrapper[4728]: I1205 11:54:08.488510 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2s4qc" event={"ID":"e3f237bc-2866-49bc-9fe9-ad63e416d593","Type":"ContainerDied","Data":"f0fb944bdec12cbb9ed2ba6e912b7d79fd2ec3508c722b7053b5a43638337f9b"} Dec 05 11:54:08 crc kubenswrapper[4728]: I1205 11:54:08.488539 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2s4qc" event={"ID":"e3f237bc-2866-49bc-9fe9-ad63e416d593","Type":"ContainerStarted","Data":"32e5cd75998dc73399c316509560a219b5061482f027b139b8ac70489ac856a4"} Dec 05 11:54:09 crc kubenswrapper[4728]: I1205 11:54:09.500737 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2s4qc" event={"ID":"e3f237bc-2866-49bc-9fe9-ad63e416d593","Type":"ContainerStarted","Data":"3576e24ac059b032437a4501d321fad3f700b70766a7a21baf2fefd8e4365ad8"} Dec 05 11:54:12 crc kubenswrapper[4728]: I1205 11:54:12.526174 4728 generic.go:334] "Generic (PLEG): container finished" podID="e3f237bc-2866-49bc-9fe9-ad63e416d593" containerID="3576e24ac059b032437a4501d321fad3f700b70766a7a21baf2fefd8e4365ad8" exitCode=0 Dec 05 11:54:12 crc kubenswrapper[4728]: I1205 11:54:12.526379 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2s4qc" event={"ID":"e3f237bc-2866-49bc-9fe9-ad63e416d593","Type":"ContainerDied","Data":"3576e24ac059b032437a4501d321fad3f700b70766a7a21baf2fefd8e4365ad8"} Dec 05 11:54:13 crc kubenswrapper[4728]: I1205 11:54:13.539520 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2s4qc" event={"ID":"e3f237bc-2866-49bc-9fe9-ad63e416d593","Type":"ContainerStarted","Data":"67cb2b081d507c155f480af573b9084f22990793a7c5091a4e16e027221587f5"} Dec 05 11:54:13 crc kubenswrapper[4728]: I1205 11:54:13.559603 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-2s4qc" podStartSLOduration=2.111687227 podStartE2EDuration="6.559578184s" 
podCreationTimestamp="2025-12-05 11:54:07 +0000 UTC" firstStartedPulling="2025-12-05 11:54:08.490119015 +0000 UTC m=+2782.632241708" lastFinishedPulling="2025-12-05 11:54:12.938009972 +0000 UTC m=+2787.080132665" observedRunningTime="2025-12-05 11:54:13.556538643 +0000 UTC m=+2787.698661356" watchObservedRunningTime="2025-12-05 11:54:13.559578184 +0000 UTC m=+2787.701700897" Dec 05 11:54:17 crc kubenswrapper[4728]: I1205 11:54:17.714852 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-2s4qc" Dec 05 11:54:17 crc kubenswrapper[4728]: I1205 11:54:17.716293 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-2s4qc" Dec 05 11:54:18 crc kubenswrapper[4728]: I1205 11:54:18.765881 4728 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-2s4qc" podUID="e3f237bc-2866-49bc-9fe9-ad63e416d593" containerName="registry-server" probeResult="failure" output=< Dec 05 11:54:18 crc kubenswrapper[4728]: timeout: failed to connect service ":50051" within 1s Dec 05 11:54:18 crc kubenswrapper[4728]: > Dec 05 11:54:27 crc kubenswrapper[4728]: I1205 11:54:27.758438 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-2s4qc" Dec 05 11:54:27 crc kubenswrapper[4728]: I1205 11:54:27.809696 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-2s4qc" Dec 05 11:54:28 crc kubenswrapper[4728]: I1205 11:54:28.447321 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2s4qc"] Dec 05 11:54:29 crc kubenswrapper[4728]: I1205 11:54:29.694668 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-2s4qc" podUID="e3f237bc-2866-49bc-9fe9-ad63e416d593" containerName="registry-server" containerID="cri-o://67cb2b081d507c155f480af573b9084f22990793a7c5091a4e16e027221587f5" gracePeriod=2 Dec 05 11:54:30 crc kubenswrapper[4728]: I1205 11:54:30.657421 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2s4qc" Dec 05 11:54:30 crc kubenswrapper[4728]: I1205 11:54:30.705910 4728 generic.go:334] "Generic (PLEG): container finished" podID="e3f237bc-2866-49bc-9fe9-ad63e416d593" containerID="67cb2b081d507c155f480af573b9084f22990793a7c5091a4e16e027221587f5" exitCode=0 Dec 05 11:54:30 crc kubenswrapper[4728]: I1205 11:54:30.705951 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2s4qc" event={"ID":"e3f237bc-2866-49bc-9fe9-ad63e416d593","Type":"ContainerDied","Data":"67cb2b081d507c155f480af573b9084f22990793a7c5091a4e16e027221587f5"} Dec 05 11:54:30 crc kubenswrapper[4728]: I1205 11:54:30.705978 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2s4qc" event={"ID":"e3f237bc-2866-49bc-9fe9-ad63e416d593","Type":"ContainerDied","Data":"32e5cd75998dc73399c316509560a219b5061482f027b139b8ac70489ac856a4"} Dec 05 11:54:30 crc kubenswrapper[4728]: I1205 11:54:30.705995 4728 scope.go:117] "RemoveContainer" containerID="67cb2b081d507c155f480af573b9084f22990793a7c5091a4e16e027221587f5" Dec 05 11:54:30 crc kubenswrapper[4728]: I1205 11:54:30.706140 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-2s4qc" Dec 05 11:54:30 crc kubenswrapper[4728]: I1205 11:54:30.732813 4728 scope.go:117] "RemoveContainer" containerID="3576e24ac059b032437a4501d321fad3f700b70766a7a21baf2fefd8e4365ad8" Dec 05 11:54:30 crc kubenswrapper[4728]: I1205 11:54:30.755579 4728 scope.go:117] "RemoveContainer" containerID="f0fb944bdec12cbb9ed2ba6e912b7d79fd2ec3508c722b7053b5a43638337f9b" Dec 05 11:54:30 crc kubenswrapper[4728]: I1205 11:54:30.762484 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cjx9w\" (UniqueName: \"kubernetes.io/projected/e3f237bc-2866-49bc-9fe9-ad63e416d593-kube-api-access-cjx9w\") pod \"e3f237bc-2866-49bc-9fe9-ad63e416d593\" (UID: \"e3f237bc-2866-49bc-9fe9-ad63e416d593\") " Dec 05 11:54:30 crc kubenswrapper[4728]: I1205 11:54:30.767388 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e3f237bc-2866-49bc-9fe9-ad63e416d593-catalog-content\") pod \"e3f237bc-2866-49bc-9fe9-ad63e416d593\" (UID: \"e3f237bc-2866-49bc-9fe9-ad63e416d593\") " Dec 05 11:54:30 crc kubenswrapper[4728]: I1205 11:54:30.767586 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e3f237bc-2866-49bc-9fe9-ad63e416d593-utilities\") pod \"e3f237bc-2866-49bc-9fe9-ad63e416d593\" (UID: \"e3f237bc-2866-49bc-9fe9-ad63e416d593\") " Dec 05 11:54:30 crc kubenswrapper[4728]: I1205 11:54:30.769650 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e3f237bc-2866-49bc-9fe9-ad63e416d593-utilities" (OuterVolumeSpecName: "utilities") pod "e3f237bc-2866-49bc-9fe9-ad63e416d593" (UID: "e3f237bc-2866-49bc-9fe9-ad63e416d593"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:54:30 crc kubenswrapper[4728]: I1205 11:54:30.769770 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e3f237bc-2866-49bc-9fe9-ad63e416d593-kube-api-access-cjx9w" (OuterVolumeSpecName: "kube-api-access-cjx9w") pod "e3f237bc-2866-49bc-9fe9-ad63e416d593" (UID: "e3f237bc-2866-49bc-9fe9-ad63e416d593"). InnerVolumeSpecName "kube-api-access-cjx9w". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:54:30 crc kubenswrapper[4728]: I1205 11:54:30.844106 4728 scope.go:117] "RemoveContainer" containerID="67cb2b081d507c155f480af573b9084f22990793a7c5091a4e16e027221587f5" Dec 05 11:54:30 crc kubenswrapper[4728]: E1205 11:54:30.844498 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"67cb2b081d507c155f480af573b9084f22990793a7c5091a4e16e027221587f5\": container with ID starting with 67cb2b081d507c155f480af573b9084f22990793a7c5091a4e16e027221587f5 not found: ID does not exist" containerID="67cb2b081d507c155f480af573b9084f22990793a7c5091a4e16e027221587f5" Dec 05 11:54:30 crc kubenswrapper[4728]: I1205 11:54:30.844531 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"67cb2b081d507c155f480af573b9084f22990793a7c5091a4e16e027221587f5"} err="failed to get container status \"67cb2b081d507c155f480af573b9084f22990793a7c5091a4e16e027221587f5\": rpc error: code = NotFound desc = could not find container \"67cb2b081d507c155f480af573b9084f22990793a7c5091a4e16e027221587f5\": container with ID starting with 67cb2b081d507c155f480af573b9084f22990793a7c5091a4e16e027221587f5 not found: ID does not exist" Dec 05 11:54:30 crc kubenswrapper[4728]: I1205 11:54:30.844556 4728 scope.go:117] "RemoveContainer" containerID="3576e24ac059b032437a4501d321fad3f700b70766a7a21baf2fefd8e4365ad8" Dec 05 11:54:30 crc kubenswrapper[4728]: E1205 11:54:30.844940 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3576e24ac059b032437a4501d321fad3f700b70766a7a21baf2fefd8e4365ad8\": container with ID starting with 3576e24ac059b032437a4501d321fad3f700b70766a7a21baf2fefd8e4365ad8 not found: ID does not exist" containerID="3576e24ac059b032437a4501d321fad3f700b70766a7a21baf2fefd8e4365ad8" Dec 05 11:54:30 crc kubenswrapper[4728]: I1205 11:54:30.844965 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3576e24ac059b032437a4501d321fad3f700b70766a7a21baf2fefd8e4365ad8"} err="failed to get container status \"3576e24ac059b032437a4501d321fad3f700b70766a7a21baf2fefd8e4365ad8\": rpc error: code = NotFound desc = could not find container \"3576e24ac059b032437a4501d321fad3f700b70766a7a21baf2fefd8e4365ad8\": container with ID starting with 3576e24ac059b032437a4501d321fad3f700b70766a7a21baf2fefd8e4365ad8 not found: ID does not exist" Dec 05 11:54:30 crc kubenswrapper[4728]: I1205 11:54:30.844982 4728 scope.go:117] "RemoveContainer" containerID="f0fb944bdec12cbb9ed2ba6e912b7d79fd2ec3508c722b7053b5a43638337f9b" Dec 05 11:54:30 crc kubenswrapper[4728]: E1205 11:54:30.845351 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f0fb944bdec12cbb9ed2ba6e912b7d79fd2ec3508c722b7053b5a43638337f9b\": container with ID starting with f0fb944bdec12cbb9ed2ba6e912b7d79fd2ec3508c722b7053b5a43638337f9b not found: ID does not exist" containerID="f0fb944bdec12cbb9ed2ba6e912b7d79fd2ec3508c722b7053b5a43638337f9b" Dec 05 11:54:30 crc kubenswrapper[4728]: I1205 11:54:30.845378 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f0fb944bdec12cbb9ed2ba6e912b7d79fd2ec3508c722b7053b5a43638337f9b"} err="failed to get container status \"f0fb944bdec12cbb9ed2ba6e912b7d79fd2ec3508c722b7053b5a43638337f9b\": rpc error: code = NotFound desc = could not 
find container \"f0fb944bdec12cbb9ed2ba6e912b7d79fd2ec3508c722b7053b5a43638337f9b\": container with ID starting with f0fb944bdec12cbb9ed2ba6e912b7d79fd2ec3508c722b7053b5a43638337f9b not found: ID does not exist" Dec 05 11:54:30 crc kubenswrapper[4728]: I1205 11:54:30.870880 4728 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e3f237bc-2866-49bc-9fe9-ad63e416d593-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 11:54:30 crc kubenswrapper[4728]: I1205 11:54:30.870919 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cjx9w\" (UniqueName: \"kubernetes.io/projected/e3f237bc-2866-49bc-9fe9-ad63e416d593-kube-api-access-cjx9w\") on node \"crc\" DevicePath \"\"" Dec 05 11:54:30 crc kubenswrapper[4728]: I1205 11:54:30.897389 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e3f237bc-2866-49bc-9fe9-ad63e416d593-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e3f237bc-2866-49bc-9fe9-ad63e416d593" (UID: "e3f237bc-2866-49bc-9fe9-ad63e416d593"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:54:30 crc kubenswrapper[4728]: I1205 11:54:30.973256 4728 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e3f237bc-2866-49bc-9fe9-ad63e416d593-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 11:54:31 crc kubenswrapper[4728]: I1205 11:54:31.065875 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2s4qc"] Dec 05 11:54:31 crc kubenswrapper[4728]: I1205 11:54:31.073875 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-2s4qc"] Dec 05 11:54:32 crc kubenswrapper[4728]: I1205 11:54:32.365579 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e3f237bc-2866-49bc-9fe9-ad63e416d593" path="/var/lib/kubelet/pods/e3f237bc-2866-49bc-9fe9-ad63e416d593/volumes" Dec 05 11:54:55 crc kubenswrapper[4728]: I1205 11:54:55.701764 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 11:54:55 crc kubenswrapper[4728]: I1205 11:54:55.702696 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 11:55:25 crc kubenswrapper[4728]: I1205 11:55:25.701826 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 11:55:25 crc kubenswrapper[4728]: I1205 11:55:25.702539 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 
05 11:55:55 crc kubenswrapper[4728]: I1205 11:55:55.701903 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 11:55:55 crc kubenswrapper[4728]: I1205 11:55:55.703452 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 11:55:55 crc kubenswrapper[4728]: I1205 11:55:55.703540 4728 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" Dec 05 11:55:55 crc kubenswrapper[4728]: I1205 11:55:55.704675 4728 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e948f80ca1fe2f0aa74620908aeae037dd5cfb64e02974b258183cfdc09f050d"} pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 11:55:55 crc kubenswrapper[4728]: I1205 11:55:55.704777 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" containerID="cri-o://e948f80ca1fe2f0aa74620908aeae037dd5cfb64e02974b258183cfdc09f050d" gracePeriod=600 Dec 05 11:55:56 crc kubenswrapper[4728]: I1205 11:55:56.545495 4728 generic.go:334] "Generic (PLEG): container finished" podID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerID="e948f80ca1fe2f0aa74620908aeae037dd5cfb64e02974b258183cfdc09f050d" exitCode=0 Dec 05 11:55:56 crc kubenswrapper[4728]: I1205 11:55:56.545690 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" event={"ID":"95bfa60b-fcb6-4519-abc5-c25fea50921d","Type":"ContainerDied","Data":"e948f80ca1fe2f0aa74620908aeae037dd5cfb64e02974b258183cfdc09f050d"} Dec 05 11:55:56 crc kubenswrapper[4728]: I1205 11:55:56.546443 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" event={"ID":"95bfa60b-fcb6-4519-abc5-c25fea50921d","Type":"ContainerStarted","Data":"7835518c087cce52627694f88ed4d8c74d1c85ad49c6e2245b200a55a9460994"} Dec 05 11:55:56 crc kubenswrapper[4728]: I1205 11:55:56.546473 4728 scope.go:117] "RemoveContainer" containerID="2d07033b290f931a3661668c97d1daf60b1b0020e8132f72469f3cf338c1768e" Dec 05 11:55:57 crc kubenswrapper[4728]: I1205 11:55:57.556382 4728 generic.go:334] "Generic (PLEG): container finished" podID="08a82141-a6d5-4c68-9adb-9c4158a6c7c2" containerID="de4e5f09dcb6c84bae2f35ff930f8522f9e25feb40ec8e8aafa1083e16d266db" exitCode=0 Dec 05 11:55:57 crc kubenswrapper[4728]: I1205 11:55:57.556450 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mb8fg" event={"ID":"08a82141-a6d5-4c68-9adb-9c4158a6c7c2","Type":"ContainerDied","Data":"de4e5f09dcb6c84bae2f35ff930f8522f9e25feb40ec8e8aafa1083e16d266db"} Dec 05 11:55:58 crc kubenswrapper[4728]: I1205 11:55:58.963048 4728 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mb8fg" Dec 05 11:55:59 crc kubenswrapper[4728]: I1205 11:55:59.102393 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/08a82141-a6d5-4c68-9adb-9c4158a6c7c2-ssh-key\") pod \"08a82141-a6d5-4c68-9adb-9c4158a6c7c2\" (UID: \"08a82141-a6d5-4c68-9adb-9c4158a6c7c2\") " Dec 05 11:55:59 crc kubenswrapper[4728]: I1205 11:55:59.102494 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qrhvs\" (UniqueName: \"kubernetes.io/projected/08a82141-a6d5-4c68-9adb-9c4158a6c7c2-kube-api-access-qrhvs\") pod \"08a82141-a6d5-4c68-9adb-9c4158a6c7c2\" (UID: \"08a82141-a6d5-4c68-9adb-9c4158a6c7c2\") " Dec 05 11:55:59 crc kubenswrapper[4728]: I1205 11:55:59.102556 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/08a82141-a6d5-4c68-9adb-9c4158a6c7c2-ceilometer-compute-config-data-2\") pod \"08a82141-a6d5-4c68-9adb-9c4158a6c7c2\" (UID: \"08a82141-a6d5-4c68-9adb-9c4158a6c7c2\") " Dec 05 11:55:59 crc kubenswrapper[4728]: I1205 11:55:59.102616 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/08a82141-a6d5-4c68-9adb-9c4158a6c7c2-ceilometer-compute-config-data-0\") pod \"08a82141-a6d5-4c68-9adb-9c4158a6c7c2\" (UID: \"08a82141-a6d5-4c68-9adb-9c4158a6c7c2\") " Dec 05 11:55:59 crc kubenswrapper[4728]: I1205 11:55:59.102685 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/08a82141-a6d5-4c68-9adb-9c4158a6c7c2-ceilometer-compute-config-data-1\") pod \"08a82141-a6d5-4c68-9adb-9c4158a6c7c2\" (UID: \"08a82141-a6d5-4c68-9adb-9c4158a6c7c2\") " Dec 05 11:55:59 crc kubenswrapper[4728]: I1205 11:55:59.102753 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/08a82141-a6d5-4c68-9adb-9c4158a6c7c2-inventory\") pod \"08a82141-a6d5-4c68-9adb-9c4158a6c7c2\" (UID: \"08a82141-a6d5-4c68-9adb-9c4158a6c7c2\") " Dec 05 11:55:59 crc kubenswrapper[4728]: I1205 11:55:59.102956 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08a82141-a6d5-4c68-9adb-9c4158a6c7c2-telemetry-combined-ca-bundle\") pod \"08a82141-a6d5-4c68-9adb-9c4158a6c7c2\" (UID: \"08a82141-a6d5-4c68-9adb-9c4158a6c7c2\") " Dec 05 11:55:59 crc kubenswrapper[4728]: I1205 11:55:59.108413 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08a82141-a6d5-4c68-9adb-9c4158a6c7c2-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "08a82141-a6d5-4c68-9adb-9c4158a6c7c2" (UID: "08a82141-a6d5-4c68-9adb-9c4158a6c7c2"). InnerVolumeSpecName "telemetry-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:55:59 crc kubenswrapper[4728]: I1205 11:55:59.109712 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/08a82141-a6d5-4c68-9adb-9c4158a6c7c2-kube-api-access-qrhvs" (OuterVolumeSpecName: "kube-api-access-qrhvs") pod "08a82141-a6d5-4c68-9adb-9c4158a6c7c2" (UID: "08a82141-a6d5-4c68-9adb-9c4158a6c7c2"). InnerVolumeSpecName "kube-api-access-qrhvs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:55:59 crc kubenswrapper[4728]: I1205 11:55:59.131386 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08a82141-a6d5-4c68-9adb-9c4158a6c7c2-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "08a82141-a6d5-4c68-9adb-9c4158a6c7c2" (UID: "08a82141-a6d5-4c68-9adb-9c4158a6c7c2"). InnerVolumeSpecName "ceilometer-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:55:59 crc kubenswrapper[4728]: I1205 11:55:59.135466 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08a82141-a6d5-4c68-9adb-9c4158a6c7c2-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "08a82141-a6d5-4c68-9adb-9c4158a6c7c2" (UID: "08a82141-a6d5-4c68-9adb-9c4158a6c7c2"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:55:59 crc kubenswrapper[4728]: I1205 11:55:59.141617 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08a82141-a6d5-4c68-9adb-9c4158a6c7c2-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "08a82141-a6d5-4c68-9adb-9c4158a6c7c2" (UID: "08a82141-a6d5-4c68-9adb-9c4158a6c7c2"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:55:59 crc kubenswrapper[4728]: I1205 11:55:59.141660 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08a82141-a6d5-4c68-9adb-9c4158a6c7c2-inventory" (OuterVolumeSpecName: "inventory") pod "08a82141-a6d5-4c68-9adb-9c4158a6c7c2" (UID: "08a82141-a6d5-4c68-9adb-9c4158a6c7c2"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:55:59 crc kubenswrapper[4728]: I1205 11:55:59.145301 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08a82141-a6d5-4c68-9adb-9c4158a6c7c2-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "08a82141-a6d5-4c68-9adb-9c4158a6c7c2" (UID: "08a82141-a6d5-4c68-9adb-9c4158a6c7c2"). InnerVolumeSpecName "ceilometer-compute-config-data-2". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:55:59 crc kubenswrapper[4728]: I1205 11:55:59.205025 4728 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/08a82141-a6d5-4c68-9adb-9c4158a6c7c2-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 11:55:59 crc kubenswrapper[4728]: I1205 11:55:59.205056 4728 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08a82141-a6d5-4c68-9adb-9c4158a6c7c2-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:55:59 crc kubenswrapper[4728]: I1205 11:55:59.205067 4728 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/08a82141-a6d5-4c68-9adb-9c4158a6c7c2-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 11:55:59 crc kubenswrapper[4728]: I1205 11:55:59.205077 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qrhvs\" (UniqueName: \"kubernetes.io/projected/08a82141-a6d5-4c68-9adb-9c4158a6c7c2-kube-api-access-qrhvs\") on node \"crc\" DevicePath \"\"" Dec 05 11:55:59 crc kubenswrapper[4728]: I1205 11:55:59.205088 4728 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/08a82141-a6d5-4c68-9adb-9c4158a6c7c2-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Dec 05 11:55:59 crc kubenswrapper[4728]: I1205 11:55:59.205097 4728 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/08a82141-a6d5-4c68-9adb-9c4158a6c7c2-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Dec 05 11:55:59 crc kubenswrapper[4728]: I1205 11:55:59.205107 4728 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/08a82141-a6d5-4c68-9adb-9c4158a6c7c2-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Dec 05 11:55:59 crc kubenswrapper[4728]: I1205 11:55:59.579291 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mb8fg" Dec 05 11:55:59 crc kubenswrapper[4728]: I1205 11:55:59.579406 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mb8fg" event={"ID":"08a82141-a6d5-4c68-9adb-9c4158a6c7c2","Type":"ContainerDied","Data":"9a4302fd0cf665e9263fc467123e4e82be61539a84f535ede44c4457a421d8c5"} Dec 05 11:55:59 crc kubenswrapper[4728]: I1205 11:55:59.579790 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9a4302fd0cf665e9263fc467123e4e82be61539a84f535ede44c4457a421d8c5" Dec 05 11:57:05 crc kubenswrapper[4728]: I1205 11:57:05.126598 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"] Dec 05 11:57:05 crc kubenswrapper[4728]: E1205 11:57:05.127703 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3f237bc-2866-49bc-9fe9-ad63e416d593" containerName="extract-content" Dec 05 11:57:05 crc kubenswrapper[4728]: I1205 11:57:05.127721 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3f237bc-2866-49bc-9fe9-ad63e416d593" containerName="extract-content" Dec 05 11:57:05 crc kubenswrapper[4728]: E1205 11:57:05.127753 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08a82141-a6d5-4c68-9adb-9c4158a6c7c2" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 05 11:57:05 crc kubenswrapper[4728]: I1205 11:57:05.127766 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="08a82141-a6d5-4c68-9adb-9c4158a6c7c2" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 05 11:57:05 crc kubenswrapper[4728]: E1205 11:57:05.127824 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3f237bc-2866-49bc-9fe9-ad63e416d593" containerName="registry-server" Dec 05 11:57:05 crc kubenswrapper[4728]: I1205 11:57:05.127837 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3f237bc-2866-49bc-9fe9-ad63e416d593" containerName="registry-server" Dec 05 11:57:05 crc kubenswrapper[4728]: E1205 11:57:05.127859 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3f237bc-2866-49bc-9fe9-ad63e416d593" containerName="extract-utilities" Dec 05 11:57:05 crc kubenswrapper[4728]: I1205 11:57:05.127868 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3f237bc-2866-49bc-9fe9-ad63e416d593" containerName="extract-utilities" Dec 05 11:57:05 crc kubenswrapper[4728]: I1205 11:57:05.128129 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3f237bc-2866-49bc-9fe9-ad63e416d593" containerName="registry-server" Dec 05 11:57:05 crc kubenswrapper[4728]: I1205 11:57:05.128145 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="08a82141-a6d5-4c68-9adb-9c4158a6c7c2" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 05 11:57:05 crc kubenswrapper[4728]: I1205 11:57:05.129200 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 05 11:57:05 crc kubenswrapper[4728]: I1205 11:57:05.133400 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0" Dec 05 11:57:05 crc kubenswrapper[4728]: I1205 11:57:05.133985 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key" Dec 05 11:57:05 crc kubenswrapper[4728]: I1205 11:57:05.134032 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Dec 05 11:57:05 crc kubenswrapper[4728]: I1205 11:57:05.150703 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Dec 05 11:57:05 crc kubenswrapper[4728]: I1205 11:57:05.208598 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/b71aa6bd-22ea-4144-84ea-a241546286a2-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"b71aa6bd-22ea-4144-84ea-a241546286a2\") " pod="openstack/tempest-tests-tempest" Dec 05 11:57:05 crc kubenswrapper[4728]: I1205 11:57:05.208648 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/b71aa6bd-22ea-4144-84ea-a241546286a2-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"b71aa6bd-22ea-4144-84ea-a241546286a2\") " pod="openstack/tempest-tests-tempest" Dec 05 11:57:05 crc kubenswrapper[4728]: I1205 11:57:05.208677 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/b71aa6bd-22ea-4144-84ea-a241546286a2-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"b71aa6bd-22ea-4144-84ea-a241546286a2\") " pod="openstack/tempest-tests-tempest" Dec 05 11:57:05 crc kubenswrapper[4728]: I1205 11:57:05.208726 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b71aa6bd-22ea-4144-84ea-a241546286a2-config-data\") pod \"tempest-tests-tempest\" (UID: \"b71aa6bd-22ea-4144-84ea-a241546286a2\") " pod="openstack/tempest-tests-tempest" Dec 05 11:57:05 crc kubenswrapper[4728]: I1205 11:57:05.208765 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b71aa6bd-22ea-4144-84ea-a241546286a2-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"b71aa6bd-22ea-4144-84ea-a241546286a2\") " pod="openstack/tempest-tests-tempest" Dec 05 11:57:05 crc kubenswrapper[4728]: I1205 11:57:05.208807 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6f5fb\" (UniqueName: \"kubernetes.io/projected/b71aa6bd-22ea-4144-84ea-a241546286a2-kube-api-access-6f5fb\") pod \"tempest-tests-tempest\" (UID: \"b71aa6bd-22ea-4144-84ea-a241546286a2\") " pod="openstack/tempest-tests-tempest" Dec 05 11:57:05 crc kubenswrapper[4728]: I1205 11:57:05.208856 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"tempest-tests-tempest\" (UID: \"b71aa6bd-22ea-4144-84ea-a241546286a2\") " pod="openstack/tempest-tests-tempest" Dec 05 
11:57:05 crc kubenswrapper[4728]: I1205 11:57:05.208872 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/b71aa6bd-22ea-4144-84ea-a241546286a2-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"b71aa6bd-22ea-4144-84ea-a241546286a2\") " pod="openstack/tempest-tests-tempest" Dec 05 11:57:05 crc kubenswrapper[4728]: I1205 11:57:05.208921 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/b71aa6bd-22ea-4144-84ea-a241546286a2-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"b71aa6bd-22ea-4144-84ea-a241546286a2\") " pod="openstack/tempest-tests-tempest" Dec 05 11:57:05 crc kubenswrapper[4728]: I1205 11:57:05.310658 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/b71aa6bd-22ea-4144-84ea-a241546286a2-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"b71aa6bd-22ea-4144-84ea-a241546286a2\") " pod="openstack/tempest-tests-tempest" Dec 05 11:57:05 crc kubenswrapper[4728]: I1205 11:57:05.310714 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/b71aa6bd-22ea-4144-84ea-a241546286a2-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"b71aa6bd-22ea-4144-84ea-a241546286a2\") " pod="openstack/tempest-tests-tempest" Dec 05 11:57:05 crc kubenswrapper[4728]: I1205 11:57:05.310748 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/b71aa6bd-22ea-4144-84ea-a241546286a2-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"b71aa6bd-22ea-4144-84ea-a241546286a2\") " pod="openstack/tempest-tests-tempest" Dec 05 11:57:05 crc kubenswrapper[4728]: I1205 11:57:05.310779 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b71aa6bd-22ea-4144-84ea-a241546286a2-config-data\") pod \"tempest-tests-tempest\" (UID: \"b71aa6bd-22ea-4144-84ea-a241546286a2\") " pod="openstack/tempest-tests-tempest" Dec 05 11:57:05 crc kubenswrapper[4728]: I1205 11:57:05.310834 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b71aa6bd-22ea-4144-84ea-a241546286a2-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"b71aa6bd-22ea-4144-84ea-a241546286a2\") " pod="openstack/tempest-tests-tempest" Dec 05 11:57:05 crc kubenswrapper[4728]: I1205 11:57:05.310862 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6f5fb\" (UniqueName: \"kubernetes.io/projected/b71aa6bd-22ea-4144-84ea-a241546286a2-kube-api-access-6f5fb\") pod \"tempest-tests-tempest\" (UID: \"b71aa6bd-22ea-4144-84ea-a241546286a2\") " pod="openstack/tempest-tests-tempest" Dec 05 11:57:05 crc kubenswrapper[4728]: I1205 11:57:05.310912 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"tempest-tests-tempest\" (UID: \"b71aa6bd-22ea-4144-84ea-a241546286a2\") " pod="openstack/tempest-tests-tempest" Dec 05 11:57:05 crc kubenswrapper[4728]: I1205 11:57:05.310931 4728 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/b71aa6bd-22ea-4144-84ea-a241546286a2-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"b71aa6bd-22ea-4144-84ea-a241546286a2\") " pod="openstack/tempest-tests-tempest" Dec 05 11:57:05 crc kubenswrapper[4728]: I1205 11:57:05.310982 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/b71aa6bd-22ea-4144-84ea-a241546286a2-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"b71aa6bd-22ea-4144-84ea-a241546286a2\") " pod="openstack/tempest-tests-tempest" Dec 05 11:57:05 crc kubenswrapper[4728]: I1205 11:57:05.311439 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/b71aa6bd-22ea-4144-84ea-a241546286a2-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"b71aa6bd-22ea-4144-84ea-a241546286a2\") " pod="openstack/tempest-tests-tempest" Dec 05 11:57:05 crc kubenswrapper[4728]: I1205 11:57:05.311489 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/b71aa6bd-22ea-4144-84ea-a241546286a2-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"b71aa6bd-22ea-4144-84ea-a241546286a2\") " pod="openstack/tempest-tests-tempest" Dec 05 11:57:05 crc kubenswrapper[4728]: I1205 11:57:05.311924 4728 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"tempest-tests-tempest\" (UID: \"b71aa6bd-22ea-4144-84ea-a241546286a2\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/tempest-tests-tempest" Dec 05 11:57:05 crc kubenswrapper[4728]: I1205 11:57:05.312838 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/b71aa6bd-22ea-4144-84ea-a241546286a2-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"b71aa6bd-22ea-4144-84ea-a241546286a2\") " pod="openstack/tempest-tests-tempest" Dec 05 11:57:05 crc kubenswrapper[4728]: I1205 11:57:05.319442 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b71aa6bd-22ea-4144-84ea-a241546286a2-config-data\") pod \"tempest-tests-tempest\" (UID: \"b71aa6bd-22ea-4144-84ea-a241546286a2\") " pod="openstack/tempest-tests-tempest" Dec 05 11:57:05 crc kubenswrapper[4728]: I1205 11:57:05.320187 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b71aa6bd-22ea-4144-84ea-a241546286a2-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"b71aa6bd-22ea-4144-84ea-a241546286a2\") " pod="openstack/tempest-tests-tempest" Dec 05 11:57:05 crc kubenswrapper[4728]: I1205 11:57:05.327867 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/b71aa6bd-22ea-4144-84ea-a241546286a2-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"b71aa6bd-22ea-4144-84ea-a241546286a2\") " pod="openstack/tempest-tests-tempest" Dec 05 11:57:05 crc kubenswrapper[4728]: I1205 11:57:05.333990 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: 
\"kubernetes.io/secret/b71aa6bd-22ea-4144-84ea-a241546286a2-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"b71aa6bd-22ea-4144-84ea-a241546286a2\") " pod="openstack/tempest-tests-tempest" Dec 05 11:57:05 crc kubenswrapper[4728]: I1205 11:57:05.342025 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6f5fb\" (UniqueName: \"kubernetes.io/projected/b71aa6bd-22ea-4144-84ea-a241546286a2-kube-api-access-6f5fb\") pod \"tempest-tests-tempest\" (UID: \"b71aa6bd-22ea-4144-84ea-a241546286a2\") " pod="openstack/tempest-tests-tempest" Dec 05 11:57:05 crc kubenswrapper[4728]: I1205 11:57:05.362686 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"tempest-tests-tempest\" (UID: \"b71aa6bd-22ea-4144-84ea-a241546286a2\") " pod="openstack/tempest-tests-tempest" Dec 05 11:57:05 crc kubenswrapper[4728]: I1205 11:57:05.450806 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 05 11:57:05 crc kubenswrapper[4728]: I1205 11:57:05.924863 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Dec 05 11:57:05 crc kubenswrapper[4728]: I1205 11:57:05.927535 4728 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 11:57:06 crc kubenswrapper[4728]: I1205 11:57:06.216391 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"b71aa6bd-22ea-4144-84ea-a241546286a2","Type":"ContainerStarted","Data":"909bae2cc483d74ed9ae06d98eb37cbac89332b7ed5239cb8ba080e718b8e78b"} Dec 05 11:57:39 crc kubenswrapper[4728]: E1205 11:57:39.868062 4728 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified" Dec 05 11:57:39 crc kubenswrapper[4728]: E1205 11:57:39.868857 4728 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:tempest-tests-tempest-tests-runner,Image:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/test_operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-workdir,ReadOnly:false,MountPath:/var/lib/tempest,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-temporary,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-logs,ReadOnly:false,MountPath:/var/lib/tempest/external_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/etc/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/var/lib/tempest/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/etc/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key,ReadOnly:false,MountPath:/var/lib/tempest/id_ecdsa,SubPath:ssh_key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6f5fb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42480,RunAsNonRoot:*false,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:*true,RunAsGroup:*42480,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-custom-data-s0,},Optional:nil,},SecretRef:nil,},EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-env-vars-s0,},Optional:nil,},SecretRef:nil,},},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod tempest-tests-tempest_openstack(b71aa6bd-22ea-4144-84ea-a241546286a2): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 11:57:39 crc kubenswrapper[4728]: E1205 11:57:39.870154 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/tempest-tests-tempest" 
podUID="b71aa6bd-22ea-4144-84ea-a241546286a2" Dec 05 11:57:40 crc kubenswrapper[4728]: E1205 11:57:40.601228 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified\\\"\"" pod="openstack/tempest-tests-tempest" podUID="b71aa6bd-22ea-4144-84ea-a241546286a2" Dec 05 11:57:52 crc kubenswrapper[4728]: I1205 11:57:52.933001 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Dec 05 11:57:54 crc kubenswrapper[4728]: I1205 11:57:54.742609 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"b71aa6bd-22ea-4144-84ea-a241546286a2","Type":"ContainerStarted","Data":"82c1c690c9ef66263b9288ab1da5afd24ef66692660e310f392679fcc9ca1cdb"} Dec 05 11:57:54 crc kubenswrapper[4728]: I1205 11:57:54.767184 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=3.76607879 podStartE2EDuration="50.767155515s" podCreationTimestamp="2025-12-05 11:57:04 +0000 UTC" firstStartedPulling="2025-12-05 11:57:05.927341257 +0000 UTC m=+2960.069463950" lastFinishedPulling="2025-12-05 11:57:52.928417962 +0000 UTC m=+3007.070540675" observedRunningTime="2025-12-05 11:57:54.761844774 +0000 UTC m=+3008.903967477" watchObservedRunningTime="2025-12-05 11:57:54.767155515 +0000 UTC m=+3008.909278218" Dec 05 11:58:25 crc kubenswrapper[4728]: I1205 11:58:25.702456 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 11:58:25 crc kubenswrapper[4728]: I1205 11:58:25.703241 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 11:58:55 crc kubenswrapper[4728]: I1205 11:58:55.701933 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 11:58:55 crc kubenswrapper[4728]: I1205 11:58:55.702487 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 11:59:25 crc kubenswrapper[4728]: I1205 11:59:25.701746 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 11:59:25 crc kubenswrapper[4728]: I1205 11:59:25.702348 4728 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 11:59:25 crc kubenswrapper[4728]: I1205 11:59:25.702404 4728 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" Dec 05 11:59:25 crc kubenswrapper[4728]: I1205 11:59:25.703182 4728 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7835518c087cce52627694f88ed4d8c74d1c85ad49c6e2245b200a55a9460994"} pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 11:59:25 crc kubenswrapper[4728]: I1205 11:59:25.703246 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" containerID="cri-o://7835518c087cce52627694f88ed4d8c74d1c85ad49c6e2245b200a55a9460994" gracePeriod=600 Dec 05 11:59:25 crc kubenswrapper[4728]: E1205 11:59:25.840967 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 11:59:26 crc kubenswrapper[4728]: I1205 11:59:26.692288 4728 generic.go:334] "Generic (PLEG): container finished" podID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerID="7835518c087cce52627694f88ed4d8c74d1c85ad49c6e2245b200a55a9460994" exitCode=0 Dec 05 11:59:26 crc kubenswrapper[4728]: I1205 11:59:26.692355 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" event={"ID":"95bfa60b-fcb6-4519-abc5-c25fea50921d","Type":"ContainerDied","Data":"7835518c087cce52627694f88ed4d8c74d1c85ad49c6e2245b200a55a9460994"} Dec 05 11:59:26 crc kubenswrapper[4728]: I1205 11:59:26.692626 4728 scope.go:117] "RemoveContainer" containerID="e948f80ca1fe2f0aa74620908aeae037dd5cfb64e02974b258183cfdc09f050d" Dec 05 11:59:26 crc kubenswrapper[4728]: I1205 11:59:26.693374 4728 scope.go:117] "RemoveContainer" containerID="7835518c087cce52627694f88ed4d8c74d1c85ad49c6e2245b200a55a9460994" Dec 05 11:59:26 crc kubenswrapper[4728]: E1205 11:59:26.693668 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 11:59:41 crc kubenswrapper[4728]: I1205 11:59:41.352403 4728 scope.go:117] "RemoveContainer" containerID="7835518c087cce52627694f88ed4d8c74d1c85ad49c6e2245b200a55a9460994" Dec 05 11:59:41 crc kubenswrapper[4728]: E1205 11:59:41.353161 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed 
to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 11:59:53 crc kubenswrapper[4728]: I1205 11:59:53.352146 4728 scope.go:117] "RemoveContainer" containerID="7835518c087cce52627694f88ed4d8c74d1c85ad49c6e2245b200a55a9460994" Dec 05 11:59:53 crc kubenswrapper[4728]: E1205 11:59:53.353033 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:00:00 crc kubenswrapper[4728]: I1205 12:00:00.154432 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415600-kbdcj"] Dec 05 12:00:00 crc kubenswrapper[4728]: I1205 12:00:00.156770 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415600-kbdcj" Dec 05 12:00:00 crc kubenswrapper[4728]: I1205 12:00:00.166663 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415600-kbdcj"] Dec 05 12:00:00 crc kubenswrapper[4728]: I1205 12:00:00.176924 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 12:00:00 crc kubenswrapper[4728]: I1205 12:00:00.176985 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 12:00:00 crc kubenswrapper[4728]: I1205 12:00:00.293997 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9d9578ce-cff6-4087-bb68-c74eaf3dccdf-secret-volume\") pod \"collect-profiles-29415600-kbdcj\" (UID: \"9d9578ce-cff6-4087-bb68-c74eaf3dccdf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415600-kbdcj" Dec 05 12:00:00 crc kubenswrapper[4728]: I1205 12:00:00.294210 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dhgs4\" (UniqueName: \"kubernetes.io/projected/9d9578ce-cff6-4087-bb68-c74eaf3dccdf-kube-api-access-dhgs4\") pod \"collect-profiles-29415600-kbdcj\" (UID: \"9d9578ce-cff6-4087-bb68-c74eaf3dccdf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415600-kbdcj" Dec 05 12:00:00 crc kubenswrapper[4728]: I1205 12:00:00.294242 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9d9578ce-cff6-4087-bb68-c74eaf3dccdf-config-volume\") pod \"collect-profiles-29415600-kbdcj\" (UID: \"9d9578ce-cff6-4087-bb68-c74eaf3dccdf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415600-kbdcj" Dec 05 12:00:00 crc kubenswrapper[4728]: I1205 12:00:00.395984 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dhgs4\" (UniqueName: 
\"kubernetes.io/projected/9d9578ce-cff6-4087-bb68-c74eaf3dccdf-kube-api-access-dhgs4\") pod \"collect-profiles-29415600-kbdcj\" (UID: \"9d9578ce-cff6-4087-bb68-c74eaf3dccdf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415600-kbdcj" Dec 05 12:00:00 crc kubenswrapper[4728]: I1205 12:00:00.396047 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9d9578ce-cff6-4087-bb68-c74eaf3dccdf-config-volume\") pod \"collect-profiles-29415600-kbdcj\" (UID: \"9d9578ce-cff6-4087-bb68-c74eaf3dccdf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415600-kbdcj" Dec 05 12:00:00 crc kubenswrapper[4728]: I1205 12:00:00.396146 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9d9578ce-cff6-4087-bb68-c74eaf3dccdf-secret-volume\") pod \"collect-profiles-29415600-kbdcj\" (UID: \"9d9578ce-cff6-4087-bb68-c74eaf3dccdf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415600-kbdcj" Dec 05 12:00:00 crc kubenswrapper[4728]: I1205 12:00:00.397636 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9d9578ce-cff6-4087-bb68-c74eaf3dccdf-config-volume\") pod \"collect-profiles-29415600-kbdcj\" (UID: \"9d9578ce-cff6-4087-bb68-c74eaf3dccdf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415600-kbdcj" Dec 05 12:00:00 crc kubenswrapper[4728]: I1205 12:00:00.404178 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9d9578ce-cff6-4087-bb68-c74eaf3dccdf-secret-volume\") pod \"collect-profiles-29415600-kbdcj\" (UID: \"9d9578ce-cff6-4087-bb68-c74eaf3dccdf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415600-kbdcj" Dec 05 12:00:00 crc kubenswrapper[4728]: I1205 12:00:00.418236 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dhgs4\" (UniqueName: \"kubernetes.io/projected/9d9578ce-cff6-4087-bb68-c74eaf3dccdf-kube-api-access-dhgs4\") pod \"collect-profiles-29415600-kbdcj\" (UID: \"9d9578ce-cff6-4087-bb68-c74eaf3dccdf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415600-kbdcj" Dec 05 12:00:00 crc kubenswrapper[4728]: I1205 12:00:00.496585 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415600-kbdcj" Dec 05 12:00:01 crc kubenswrapper[4728]: I1205 12:00:01.024613 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415600-kbdcj"] Dec 05 12:00:02 crc kubenswrapper[4728]: I1205 12:00:02.023155 4728 generic.go:334] "Generic (PLEG): container finished" podID="9d9578ce-cff6-4087-bb68-c74eaf3dccdf" containerID="9af8c06014f09e163c61dd3acb8f4b0e45bd585b6871919021fc6aa2bffc606c" exitCode=0 Dec 05 12:00:02 crc kubenswrapper[4728]: I1205 12:00:02.023202 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415600-kbdcj" event={"ID":"9d9578ce-cff6-4087-bb68-c74eaf3dccdf","Type":"ContainerDied","Data":"9af8c06014f09e163c61dd3acb8f4b0e45bd585b6871919021fc6aa2bffc606c"} Dec 05 12:00:02 crc kubenswrapper[4728]: I1205 12:00:02.023469 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415600-kbdcj" event={"ID":"9d9578ce-cff6-4087-bb68-c74eaf3dccdf","Type":"ContainerStarted","Data":"ee2681e23387c9e49b9684b4031e772cedde5c59885efddef67aa866f131d4e2"} Dec 05 12:00:03 crc kubenswrapper[4728]: I1205 12:00:03.618524 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415600-kbdcj" Dec 05 12:00:03 crc kubenswrapper[4728]: I1205 12:00:03.761030 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9d9578ce-cff6-4087-bb68-c74eaf3dccdf-config-volume\") pod \"9d9578ce-cff6-4087-bb68-c74eaf3dccdf\" (UID: \"9d9578ce-cff6-4087-bb68-c74eaf3dccdf\") " Dec 05 12:00:03 crc kubenswrapper[4728]: I1205 12:00:03.761085 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9d9578ce-cff6-4087-bb68-c74eaf3dccdf-secret-volume\") pod \"9d9578ce-cff6-4087-bb68-c74eaf3dccdf\" (UID: \"9d9578ce-cff6-4087-bb68-c74eaf3dccdf\") " Dec 05 12:00:03 crc kubenswrapper[4728]: I1205 12:00:03.761156 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dhgs4\" (UniqueName: \"kubernetes.io/projected/9d9578ce-cff6-4087-bb68-c74eaf3dccdf-kube-api-access-dhgs4\") pod \"9d9578ce-cff6-4087-bb68-c74eaf3dccdf\" (UID: \"9d9578ce-cff6-4087-bb68-c74eaf3dccdf\") " Dec 05 12:00:03 crc kubenswrapper[4728]: I1205 12:00:03.761923 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d9578ce-cff6-4087-bb68-c74eaf3dccdf-config-volume" (OuterVolumeSpecName: "config-volume") pod "9d9578ce-cff6-4087-bb68-c74eaf3dccdf" (UID: "9d9578ce-cff6-4087-bb68-c74eaf3dccdf"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:00:03 crc kubenswrapper[4728]: I1205 12:00:03.767620 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d9578ce-cff6-4087-bb68-c74eaf3dccdf-kube-api-access-dhgs4" (OuterVolumeSpecName: "kube-api-access-dhgs4") pod "9d9578ce-cff6-4087-bb68-c74eaf3dccdf" (UID: "9d9578ce-cff6-4087-bb68-c74eaf3dccdf"). InnerVolumeSpecName "kube-api-access-dhgs4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:00:03 crc kubenswrapper[4728]: I1205 12:00:03.781848 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d9578ce-cff6-4087-bb68-c74eaf3dccdf-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "9d9578ce-cff6-4087-bb68-c74eaf3dccdf" (UID: "9d9578ce-cff6-4087-bb68-c74eaf3dccdf"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:00:03 crc kubenswrapper[4728]: I1205 12:00:03.863376 4728 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9d9578ce-cff6-4087-bb68-c74eaf3dccdf-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 12:00:03 crc kubenswrapper[4728]: I1205 12:00:03.863415 4728 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9d9578ce-cff6-4087-bb68-c74eaf3dccdf-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 12:00:03 crc kubenswrapper[4728]: I1205 12:00:03.863425 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dhgs4\" (UniqueName: \"kubernetes.io/projected/9d9578ce-cff6-4087-bb68-c74eaf3dccdf-kube-api-access-dhgs4\") on node \"crc\" DevicePath \"\"" Dec 05 12:00:04 crc kubenswrapper[4728]: I1205 12:00:04.043001 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415600-kbdcj" event={"ID":"9d9578ce-cff6-4087-bb68-c74eaf3dccdf","Type":"ContainerDied","Data":"ee2681e23387c9e49b9684b4031e772cedde5c59885efddef67aa866f131d4e2"} Dec 05 12:00:04 crc kubenswrapper[4728]: I1205 12:00:04.043266 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ee2681e23387c9e49b9684b4031e772cedde5c59885efddef67aa866f131d4e2" Dec 05 12:00:04 crc kubenswrapper[4728]: I1205 12:00:04.043071 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415600-kbdcj" Dec 05 12:00:04 crc kubenswrapper[4728]: I1205 12:00:04.686030 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415555-lpg6c"] Dec 05 12:00:04 crc kubenswrapper[4728]: I1205 12:00:04.698108 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415555-lpg6c"] Dec 05 12:00:05 crc kubenswrapper[4728]: I1205 12:00:05.352337 4728 scope.go:117] "RemoveContainer" containerID="7835518c087cce52627694f88ed4d8c74d1c85ad49c6e2245b200a55a9460994" Dec 05 12:00:05 crc kubenswrapper[4728]: E1205 12:00:05.352855 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:00:05 crc kubenswrapper[4728]: I1205 12:00:05.688014 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-92wkq"] Dec 05 12:00:05 crc kubenswrapper[4728]: E1205 12:00:05.688467 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d9578ce-cff6-4087-bb68-c74eaf3dccdf" containerName="collect-profiles" Dec 05 12:00:05 crc kubenswrapper[4728]: I1205 12:00:05.688481 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d9578ce-cff6-4087-bb68-c74eaf3dccdf" containerName="collect-profiles" Dec 05 12:00:05 crc kubenswrapper[4728]: I1205 12:00:05.688666 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d9578ce-cff6-4087-bb68-c74eaf3dccdf" containerName="collect-profiles" Dec 05 12:00:05 crc kubenswrapper[4728]: I1205 12:00:05.689980 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-92wkq" Dec 05 12:00:05 crc kubenswrapper[4728]: I1205 12:00:05.710346 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-92wkq"] Dec 05 12:00:05 crc kubenswrapper[4728]: I1205 12:00:05.843497 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee303524-640f-496f-a93d-3c0d187a93a9-utilities\") pod \"community-operators-92wkq\" (UID: \"ee303524-640f-496f-a93d-3c0d187a93a9\") " pod="openshift-marketplace/community-operators-92wkq" Dec 05 12:00:05 crc kubenswrapper[4728]: I1205 12:00:05.843554 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee303524-640f-496f-a93d-3c0d187a93a9-catalog-content\") pod \"community-operators-92wkq\" (UID: \"ee303524-640f-496f-a93d-3c0d187a93a9\") " pod="openshift-marketplace/community-operators-92wkq" Dec 05 12:00:05 crc kubenswrapper[4728]: I1205 12:00:05.844090 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-km4ts\" (UniqueName: \"kubernetes.io/projected/ee303524-640f-496f-a93d-3c0d187a93a9-kube-api-access-km4ts\") pod \"community-operators-92wkq\" (UID: \"ee303524-640f-496f-a93d-3c0d187a93a9\") " pod="openshift-marketplace/community-operators-92wkq" Dec 05 12:00:05 crc kubenswrapper[4728]: I1205 12:00:05.946170 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-km4ts\" (UniqueName: \"kubernetes.io/projected/ee303524-640f-496f-a93d-3c0d187a93a9-kube-api-access-km4ts\") pod \"community-operators-92wkq\" (UID: \"ee303524-640f-496f-a93d-3c0d187a93a9\") " pod="openshift-marketplace/community-operators-92wkq" Dec 05 12:00:05 crc kubenswrapper[4728]: I1205 12:00:05.946266 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee303524-640f-496f-a93d-3c0d187a93a9-utilities\") pod \"community-operators-92wkq\" (UID: \"ee303524-640f-496f-a93d-3c0d187a93a9\") " pod="openshift-marketplace/community-operators-92wkq" Dec 05 12:00:05 crc kubenswrapper[4728]: I1205 12:00:05.946729 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee303524-640f-496f-a93d-3c0d187a93a9-catalog-content\") pod \"community-operators-92wkq\" (UID: \"ee303524-640f-496f-a93d-3c0d187a93a9\") " pod="openshift-marketplace/community-operators-92wkq" Dec 05 12:00:05 crc kubenswrapper[4728]: I1205 12:00:05.946956 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee303524-640f-496f-a93d-3c0d187a93a9-utilities\") pod \"community-operators-92wkq\" (UID: \"ee303524-640f-496f-a93d-3c0d187a93a9\") " pod="openshift-marketplace/community-operators-92wkq" Dec 05 12:00:05 crc kubenswrapper[4728]: I1205 12:00:05.946302 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee303524-640f-496f-a93d-3c0d187a93a9-catalog-content\") pod \"community-operators-92wkq\" (UID: \"ee303524-640f-496f-a93d-3c0d187a93a9\") " pod="openshift-marketplace/community-operators-92wkq" Dec 05 12:00:05 crc kubenswrapper[4728]: I1205 12:00:05.967079 4728 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-km4ts\" (UniqueName: \"kubernetes.io/projected/ee303524-640f-496f-a93d-3c0d187a93a9-kube-api-access-km4ts\") pod \"community-operators-92wkq\" (UID: \"ee303524-640f-496f-a93d-3c0d187a93a9\") " pod="openshift-marketplace/community-operators-92wkq" Dec 05 12:00:06 crc kubenswrapper[4728]: I1205 12:00:06.048395 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-92wkq" Dec 05 12:00:06 crc kubenswrapper[4728]: I1205 12:00:06.363045 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2abbdcf9-ccd2-4ad9-8ada-590ae997d61a" path="/var/lib/kubelet/pods/2abbdcf9-ccd2-4ad9-8ada-590ae997d61a/volumes" Dec 05 12:00:06 crc kubenswrapper[4728]: I1205 12:00:06.570863 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-92wkq"] Dec 05 12:00:06 crc kubenswrapper[4728]: W1205 12:00:06.573459 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podee303524_640f_496f_a93d_3c0d187a93a9.slice/crio-a6346e7b8161b4ac4596488451ad809d113e0ca58248a8b3bcf6da2e46d58039 WatchSource:0}: Error finding container a6346e7b8161b4ac4596488451ad809d113e0ca58248a8b3bcf6da2e46d58039: Status 404 returned error can't find the container with id a6346e7b8161b4ac4596488451ad809d113e0ca58248a8b3bcf6da2e46d58039 Dec 05 12:00:07 crc kubenswrapper[4728]: I1205 12:00:07.068435 4728 generic.go:334] "Generic (PLEG): container finished" podID="ee303524-640f-496f-a93d-3c0d187a93a9" containerID="38b9a53d4dc0db6350e691d5b6245b472a1b06281a81097cda45e12c4f65a717" exitCode=0 Dec 05 12:00:07 crc kubenswrapper[4728]: I1205 12:00:07.068522 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-92wkq" event={"ID":"ee303524-640f-496f-a93d-3c0d187a93a9","Type":"ContainerDied","Data":"38b9a53d4dc0db6350e691d5b6245b472a1b06281a81097cda45e12c4f65a717"} Dec 05 12:00:07 crc kubenswrapper[4728]: I1205 12:00:07.068771 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-92wkq" event={"ID":"ee303524-640f-496f-a93d-3c0d187a93a9","Type":"ContainerStarted","Data":"a6346e7b8161b4ac4596488451ad809d113e0ca58248a8b3bcf6da2e46d58039"} Dec 05 12:00:08 crc kubenswrapper[4728]: I1205 12:00:08.086504 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-92wkq" event={"ID":"ee303524-640f-496f-a93d-3c0d187a93a9","Type":"ContainerStarted","Data":"e0e26ad4bf7f16c14d7a2254dbfcd4fe5cb1c61d46dcfaefb1636ff60ad21512"} Dec 05 12:00:10 crc kubenswrapper[4728]: I1205 12:00:10.104348 4728 generic.go:334] "Generic (PLEG): container finished" podID="ee303524-640f-496f-a93d-3c0d187a93a9" containerID="e0e26ad4bf7f16c14d7a2254dbfcd4fe5cb1c61d46dcfaefb1636ff60ad21512" exitCode=0 Dec 05 12:00:10 crc kubenswrapper[4728]: I1205 12:00:10.104389 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-92wkq" event={"ID":"ee303524-640f-496f-a93d-3c0d187a93a9","Type":"ContainerDied","Data":"e0e26ad4bf7f16c14d7a2254dbfcd4fe5cb1c61d46dcfaefb1636ff60ad21512"} Dec 05 12:00:11 crc kubenswrapper[4728]: I1205 12:00:11.115285 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-92wkq" 
event={"ID":"ee303524-640f-496f-a93d-3c0d187a93a9","Type":"ContainerStarted","Data":"8e679a5bbb95c340c7416dc11df0ba3c64ecd01b28a8f6a6607f84e7791d974a"} Dec 05 12:00:11 crc kubenswrapper[4728]: I1205 12:00:11.139876 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-92wkq" podStartSLOduration=2.652675398 podStartE2EDuration="6.139858048s" podCreationTimestamp="2025-12-05 12:00:05 +0000 UTC" firstStartedPulling="2025-12-05 12:00:07.070106784 +0000 UTC m=+3141.212229477" lastFinishedPulling="2025-12-05 12:00:10.557289434 +0000 UTC m=+3144.699412127" observedRunningTime="2025-12-05 12:00:11.134717251 +0000 UTC m=+3145.276839944" watchObservedRunningTime="2025-12-05 12:00:11.139858048 +0000 UTC m=+3145.281980741" Dec 05 12:00:16 crc kubenswrapper[4728]: I1205 12:00:16.049974 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-92wkq" Dec 05 12:00:16 crc kubenswrapper[4728]: I1205 12:00:16.050573 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-92wkq" Dec 05 12:00:16 crc kubenswrapper[4728]: I1205 12:00:16.115865 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-92wkq" Dec 05 12:00:16 crc kubenswrapper[4728]: I1205 12:00:16.211090 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-92wkq" Dec 05 12:00:16 crc kubenswrapper[4728]: I1205 12:00:16.350272 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-92wkq"] Dec 05 12:00:18 crc kubenswrapper[4728]: I1205 12:00:18.175510 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-92wkq" podUID="ee303524-640f-496f-a93d-3c0d187a93a9" containerName="registry-server" containerID="cri-o://8e679a5bbb95c340c7416dc11df0ba3c64ecd01b28a8f6a6607f84e7791d974a" gracePeriod=2 Dec 05 12:00:19 crc kubenswrapper[4728]: I1205 12:00:19.191267 4728 generic.go:334] "Generic (PLEG): container finished" podID="ee303524-640f-496f-a93d-3c0d187a93a9" containerID="8e679a5bbb95c340c7416dc11df0ba3c64ecd01b28a8f6a6607f84e7791d974a" exitCode=0 Dec 05 12:00:19 crc kubenswrapper[4728]: I1205 12:00:19.191403 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-92wkq" event={"ID":"ee303524-640f-496f-a93d-3c0d187a93a9","Type":"ContainerDied","Data":"8e679a5bbb95c340c7416dc11df0ba3c64ecd01b28a8f6a6607f84e7791d974a"} Dec 05 12:00:19 crc kubenswrapper[4728]: I1205 12:00:19.380586 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-92wkq" Dec 05 12:00:19 crc kubenswrapper[4728]: I1205 12:00:19.455061 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee303524-640f-496f-a93d-3c0d187a93a9-utilities\") pod \"ee303524-640f-496f-a93d-3c0d187a93a9\" (UID: \"ee303524-640f-496f-a93d-3c0d187a93a9\") " Dec 05 12:00:19 crc kubenswrapper[4728]: I1205 12:00:19.455291 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee303524-640f-496f-a93d-3c0d187a93a9-catalog-content\") pod \"ee303524-640f-496f-a93d-3c0d187a93a9\" (UID: \"ee303524-640f-496f-a93d-3c0d187a93a9\") " Dec 05 12:00:19 crc kubenswrapper[4728]: I1205 12:00:19.455419 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-km4ts\" (UniqueName: \"kubernetes.io/projected/ee303524-640f-496f-a93d-3c0d187a93a9-kube-api-access-km4ts\") pod \"ee303524-640f-496f-a93d-3c0d187a93a9\" (UID: \"ee303524-640f-496f-a93d-3c0d187a93a9\") " Dec 05 12:00:19 crc kubenswrapper[4728]: I1205 12:00:19.456303 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ee303524-640f-496f-a93d-3c0d187a93a9-utilities" (OuterVolumeSpecName: "utilities") pod "ee303524-640f-496f-a93d-3c0d187a93a9" (UID: "ee303524-640f-496f-a93d-3c0d187a93a9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:00:19 crc kubenswrapper[4728]: I1205 12:00:19.461050 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee303524-640f-496f-a93d-3c0d187a93a9-kube-api-access-km4ts" (OuterVolumeSpecName: "kube-api-access-km4ts") pod "ee303524-640f-496f-a93d-3c0d187a93a9" (UID: "ee303524-640f-496f-a93d-3c0d187a93a9"). InnerVolumeSpecName "kube-api-access-km4ts". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:00:19 crc kubenswrapper[4728]: I1205 12:00:19.512232 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ee303524-640f-496f-a93d-3c0d187a93a9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ee303524-640f-496f-a93d-3c0d187a93a9" (UID: "ee303524-640f-496f-a93d-3c0d187a93a9"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:00:19 crc kubenswrapper[4728]: I1205 12:00:19.558334 4728 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee303524-640f-496f-a93d-3c0d187a93a9-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 12:00:19 crc kubenswrapper[4728]: I1205 12:00:19.558372 4728 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee303524-640f-496f-a93d-3c0d187a93a9-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 12:00:19 crc kubenswrapper[4728]: I1205 12:00:19.558384 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-km4ts\" (UniqueName: \"kubernetes.io/projected/ee303524-640f-496f-a93d-3c0d187a93a9-kube-api-access-km4ts\") on node \"crc\" DevicePath \"\"" Dec 05 12:00:20 crc kubenswrapper[4728]: I1205 12:00:20.202539 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-92wkq" event={"ID":"ee303524-640f-496f-a93d-3c0d187a93a9","Type":"ContainerDied","Data":"a6346e7b8161b4ac4596488451ad809d113e0ca58248a8b3bcf6da2e46d58039"} Dec 05 12:00:20 crc kubenswrapper[4728]: I1205 12:00:20.202598 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-92wkq" Dec 05 12:00:20 crc kubenswrapper[4728]: I1205 12:00:20.202891 4728 scope.go:117] "RemoveContainer" containerID="8e679a5bbb95c340c7416dc11df0ba3c64ecd01b28a8f6a6607f84e7791d974a" Dec 05 12:00:20 crc kubenswrapper[4728]: I1205 12:00:20.223621 4728 scope.go:117] "RemoveContainer" containerID="e0e26ad4bf7f16c14d7a2254dbfcd4fe5cb1c61d46dcfaefb1636ff60ad21512" Dec 05 12:00:20 crc kubenswrapper[4728]: I1205 12:00:20.266025 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-92wkq"] Dec 05 12:00:20 crc kubenswrapper[4728]: I1205 12:00:20.268031 4728 scope.go:117] "RemoveContainer" containerID="38b9a53d4dc0db6350e691d5b6245b472a1b06281a81097cda45e12c4f65a717" Dec 05 12:00:20 crc kubenswrapper[4728]: I1205 12:00:20.303035 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-92wkq"] Dec 05 12:00:20 crc kubenswrapper[4728]: I1205 12:00:20.353083 4728 scope.go:117] "RemoveContainer" containerID="7835518c087cce52627694f88ed4d8c74d1c85ad49c6e2245b200a55a9460994" Dec 05 12:00:20 crc kubenswrapper[4728]: E1205 12:00:20.353579 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:00:20 crc kubenswrapper[4728]: I1205 12:00:20.369396 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ee303524-640f-496f-a93d-3c0d187a93a9" path="/var/lib/kubelet/pods/ee303524-640f-496f-a93d-3c0d187a93a9/volumes" Dec 05 12:00:33 crc kubenswrapper[4728]: I1205 12:00:33.352255 4728 scope.go:117] "RemoveContainer" containerID="7835518c087cce52627694f88ed4d8c74d1c85ad49c6e2245b200a55a9460994" Dec 05 12:00:33 crc kubenswrapper[4728]: E1205 12:00:33.353082 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" 
with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:00:46 crc kubenswrapper[4728]: I1205 12:00:46.361171 4728 scope.go:117] "RemoveContainer" containerID="7835518c087cce52627694f88ed4d8c74d1c85ad49c6e2245b200a55a9460994" Dec 05 12:00:46 crc kubenswrapper[4728]: E1205 12:00:46.361938 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:01:00 crc kubenswrapper[4728]: I1205 12:01:00.157185 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29415601-tk5jd"] Dec 05 12:01:00 crc kubenswrapper[4728]: E1205 12:01:00.158113 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee303524-640f-496f-a93d-3c0d187a93a9" containerName="extract-utilities" Dec 05 12:01:00 crc kubenswrapper[4728]: I1205 12:01:00.158131 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee303524-640f-496f-a93d-3c0d187a93a9" containerName="extract-utilities" Dec 05 12:01:00 crc kubenswrapper[4728]: E1205 12:01:00.158145 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee303524-640f-496f-a93d-3c0d187a93a9" containerName="extract-content" Dec 05 12:01:00 crc kubenswrapper[4728]: I1205 12:01:00.158152 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee303524-640f-496f-a93d-3c0d187a93a9" containerName="extract-content" Dec 05 12:01:00 crc kubenswrapper[4728]: E1205 12:01:00.158172 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee303524-640f-496f-a93d-3c0d187a93a9" containerName="registry-server" Dec 05 12:01:00 crc kubenswrapper[4728]: I1205 12:01:00.158183 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee303524-640f-496f-a93d-3c0d187a93a9" containerName="registry-server" Dec 05 12:01:00 crc kubenswrapper[4728]: I1205 12:01:00.158402 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee303524-640f-496f-a93d-3c0d187a93a9" containerName="registry-server" Dec 05 12:01:00 crc kubenswrapper[4728]: I1205 12:01:00.159160 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29415601-tk5jd" Dec 05 12:01:00 crc kubenswrapper[4728]: I1205 12:01:00.175952 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29415601-tk5jd"] Dec 05 12:01:00 crc kubenswrapper[4728]: I1205 12:01:00.238524 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ab6c45e-d05b-4ddc-92e0-5addedce425d-config-data\") pod \"keystone-cron-29415601-tk5jd\" (UID: \"5ab6c45e-d05b-4ddc-92e0-5addedce425d\") " pod="openstack/keystone-cron-29415601-tk5jd" Dec 05 12:01:00 crc kubenswrapper[4728]: I1205 12:01:00.238669 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ab6c45e-d05b-4ddc-92e0-5addedce425d-combined-ca-bundle\") pod \"keystone-cron-29415601-tk5jd\" (UID: \"5ab6c45e-d05b-4ddc-92e0-5addedce425d\") " pod="openstack/keystone-cron-29415601-tk5jd" Dec 05 12:01:00 crc kubenswrapper[4728]: I1205 12:01:00.238697 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5ab6c45e-d05b-4ddc-92e0-5addedce425d-fernet-keys\") pod \"keystone-cron-29415601-tk5jd\" (UID: \"5ab6c45e-d05b-4ddc-92e0-5addedce425d\") " pod="openstack/keystone-cron-29415601-tk5jd" Dec 05 12:01:00 crc kubenswrapper[4728]: I1205 12:01:00.238753 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5666b\" (UniqueName: \"kubernetes.io/projected/5ab6c45e-d05b-4ddc-92e0-5addedce425d-kube-api-access-5666b\") pod \"keystone-cron-29415601-tk5jd\" (UID: \"5ab6c45e-d05b-4ddc-92e0-5addedce425d\") " pod="openstack/keystone-cron-29415601-tk5jd" Dec 05 12:01:00 crc kubenswrapper[4728]: I1205 12:01:00.340698 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ab6c45e-d05b-4ddc-92e0-5addedce425d-config-data\") pod \"keystone-cron-29415601-tk5jd\" (UID: \"5ab6c45e-d05b-4ddc-92e0-5addedce425d\") " pod="openstack/keystone-cron-29415601-tk5jd" Dec 05 12:01:00 crc kubenswrapper[4728]: I1205 12:01:00.341159 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ab6c45e-d05b-4ddc-92e0-5addedce425d-combined-ca-bundle\") pod \"keystone-cron-29415601-tk5jd\" (UID: \"5ab6c45e-d05b-4ddc-92e0-5addedce425d\") " pod="openstack/keystone-cron-29415601-tk5jd" Dec 05 12:01:00 crc kubenswrapper[4728]: I1205 12:01:00.341308 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5ab6c45e-d05b-4ddc-92e0-5addedce425d-fernet-keys\") pod \"keystone-cron-29415601-tk5jd\" (UID: \"5ab6c45e-d05b-4ddc-92e0-5addedce425d\") " pod="openstack/keystone-cron-29415601-tk5jd" Dec 05 12:01:00 crc kubenswrapper[4728]: I1205 12:01:00.342187 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5666b\" (UniqueName: \"kubernetes.io/projected/5ab6c45e-d05b-4ddc-92e0-5addedce425d-kube-api-access-5666b\") pod \"keystone-cron-29415601-tk5jd\" (UID: \"5ab6c45e-d05b-4ddc-92e0-5addedce425d\") " pod="openstack/keystone-cron-29415601-tk5jd" Dec 05 12:01:00 crc kubenswrapper[4728]: I1205 12:01:00.348110 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ab6c45e-d05b-4ddc-92e0-5addedce425d-config-data\") pod \"keystone-cron-29415601-tk5jd\" (UID: \"5ab6c45e-d05b-4ddc-92e0-5addedce425d\") " pod="openstack/keystone-cron-29415601-tk5jd" Dec 05 12:01:00 crc kubenswrapper[4728]: I1205 12:01:00.348330 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ab6c45e-d05b-4ddc-92e0-5addedce425d-combined-ca-bundle\") pod \"keystone-cron-29415601-tk5jd\" (UID: \"5ab6c45e-d05b-4ddc-92e0-5addedce425d\") " pod="openstack/keystone-cron-29415601-tk5jd" Dec 05 12:01:00 crc kubenswrapper[4728]: I1205 12:01:00.369310 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5ab6c45e-d05b-4ddc-92e0-5addedce425d-fernet-keys\") pod \"keystone-cron-29415601-tk5jd\" (UID: \"5ab6c45e-d05b-4ddc-92e0-5addedce425d\") " pod="openstack/keystone-cron-29415601-tk5jd" Dec 05 12:01:00 crc kubenswrapper[4728]: I1205 12:01:00.376489 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5666b\" (UniqueName: \"kubernetes.io/projected/5ab6c45e-d05b-4ddc-92e0-5addedce425d-kube-api-access-5666b\") pod \"keystone-cron-29415601-tk5jd\" (UID: \"5ab6c45e-d05b-4ddc-92e0-5addedce425d\") " pod="openstack/keystone-cron-29415601-tk5jd" Dec 05 12:01:00 crc kubenswrapper[4728]: I1205 12:01:00.487076 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29415601-tk5jd" Dec 05 12:01:00 crc kubenswrapper[4728]: I1205 12:01:00.948403 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29415601-tk5jd"] Dec 05 12:01:01 crc kubenswrapper[4728]: I1205 12:01:01.355768 4728 scope.go:117] "RemoveContainer" containerID="7835518c087cce52627694f88ed4d8c74d1c85ad49c6e2245b200a55a9460994" Dec 05 12:01:01 crc kubenswrapper[4728]: E1205 12:01:01.356327 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:01:01 crc kubenswrapper[4728]: I1205 12:01:01.361788 4728 scope.go:117] "RemoveContainer" containerID="cfeb963bbf360c6a877e547c2446de6ad1ea69e30230bd56cf89b519551f3845" Dec 05 12:01:01 crc kubenswrapper[4728]: I1205 12:01:01.568287 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29415601-tk5jd" event={"ID":"5ab6c45e-d05b-4ddc-92e0-5addedce425d","Type":"ContainerStarted","Data":"8374901f385a0ad6ff0a7cc499d83fe4dc41ae29afa06b2226106bb5c9394de4"} Dec 05 12:01:01 crc kubenswrapper[4728]: I1205 12:01:01.568339 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29415601-tk5jd" event={"ID":"5ab6c45e-d05b-4ddc-92e0-5addedce425d","Type":"ContainerStarted","Data":"4a118b06bc9ff5b3a07777e268a0add59effa7320e95fa81b5b9c30aae493e76"} Dec 05 12:01:01 crc kubenswrapper[4728]: I1205 12:01:01.592079 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29415601-tk5jd" podStartSLOduration=1.592056163 podStartE2EDuration="1.592056163s" podCreationTimestamp="2025-12-05 12:01:00 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:01:01.583992598 +0000 UTC m=+3195.726115301" watchObservedRunningTime="2025-12-05 12:01:01.592056163 +0000 UTC m=+3195.734178856" Dec 05 12:01:04 crc kubenswrapper[4728]: I1205 12:01:04.616848 4728 generic.go:334] "Generic (PLEG): container finished" podID="5ab6c45e-d05b-4ddc-92e0-5addedce425d" containerID="8374901f385a0ad6ff0a7cc499d83fe4dc41ae29afa06b2226106bb5c9394de4" exitCode=0 Dec 05 12:01:04 crc kubenswrapper[4728]: I1205 12:01:04.616913 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29415601-tk5jd" event={"ID":"5ab6c45e-d05b-4ddc-92e0-5addedce425d","Type":"ContainerDied","Data":"8374901f385a0ad6ff0a7cc499d83fe4dc41ae29afa06b2226106bb5c9394de4"} Dec 05 12:01:06 crc kubenswrapper[4728]: I1205 12:01:06.250164 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29415601-tk5jd" Dec 05 12:01:06 crc kubenswrapper[4728]: I1205 12:01:06.290502 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ab6c45e-d05b-4ddc-92e0-5addedce425d-combined-ca-bundle\") pod \"5ab6c45e-d05b-4ddc-92e0-5addedce425d\" (UID: \"5ab6c45e-d05b-4ddc-92e0-5addedce425d\") " Dec 05 12:01:06 crc kubenswrapper[4728]: I1205 12:01:06.290646 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ab6c45e-d05b-4ddc-92e0-5addedce425d-config-data\") pod \"5ab6c45e-d05b-4ddc-92e0-5addedce425d\" (UID: \"5ab6c45e-d05b-4ddc-92e0-5addedce425d\") " Dec 05 12:01:06 crc kubenswrapper[4728]: I1205 12:01:06.290669 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5ab6c45e-d05b-4ddc-92e0-5addedce425d-fernet-keys\") pod \"5ab6c45e-d05b-4ddc-92e0-5addedce425d\" (UID: \"5ab6c45e-d05b-4ddc-92e0-5addedce425d\") " Dec 05 12:01:06 crc kubenswrapper[4728]: I1205 12:01:06.290690 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5666b\" (UniqueName: \"kubernetes.io/projected/5ab6c45e-d05b-4ddc-92e0-5addedce425d-kube-api-access-5666b\") pod \"5ab6c45e-d05b-4ddc-92e0-5addedce425d\" (UID: \"5ab6c45e-d05b-4ddc-92e0-5addedce425d\") " Dec 05 12:01:06 crc kubenswrapper[4728]: I1205 12:01:06.296952 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ab6c45e-d05b-4ddc-92e0-5addedce425d-kube-api-access-5666b" (OuterVolumeSpecName: "kube-api-access-5666b") pod "5ab6c45e-d05b-4ddc-92e0-5addedce425d" (UID: "5ab6c45e-d05b-4ddc-92e0-5addedce425d"). InnerVolumeSpecName "kube-api-access-5666b". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:01:06 crc kubenswrapper[4728]: I1205 12:01:06.299700 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ab6c45e-d05b-4ddc-92e0-5addedce425d-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "5ab6c45e-d05b-4ddc-92e0-5addedce425d" (UID: "5ab6c45e-d05b-4ddc-92e0-5addedce425d"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:01:06 crc kubenswrapper[4728]: I1205 12:01:06.341113 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ab6c45e-d05b-4ddc-92e0-5addedce425d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5ab6c45e-d05b-4ddc-92e0-5addedce425d" (UID: "5ab6c45e-d05b-4ddc-92e0-5addedce425d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:01:06 crc kubenswrapper[4728]: I1205 12:01:06.378921 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ab6c45e-d05b-4ddc-92e0-5addedce425d-config-data" (OuterVolumeSpecName: "config-data") pod "5ab6c45e-d05b-4ddc-92e0-5addedce425d" (UID: "5ab6c45e-d05b-4ddc-92e0-5addedce425d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:01:06 crc kubenswrapper[4728]: I1205 12:01:06.393597 4728 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ab6c45e-d05b-4ddc-92e0-5addedce425d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:01:06 crc kubenswrapper[4728]: I1205 12:01:06.393631 4728 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ab6c45e-d05b-4ddc-92e0-5addedce425d-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:01:06 crc kubenswrapper[4728]: I1205 12:01:06.393645 4728 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5ab6c45e-d05b-4ddc-92e0-5addedce425d-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 05 12:01:06 crc kubenswrapper[4728]: I1205 12:01:06.393656 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5666b\" (UniqueName: \"kubernetes.io/projected/5ab6c45e-d05b-4ddc-92e0-5addedce425d-kube-api-access-5666b\") on node \"crc\" DevicePath \"\"" Dec 05 12:01:06 crc kubenswrapper[4728]: I1205 12:01:06.640119 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29415601-tk5jd" event={"ID":"5ab6c45e-d05b-4ddc-92e0-5addedce425d","Type":"ContainerDied","Data":"4a118b06bc9ff5b3a07777e268a0add59effa7320e95fa81b5b9c30aae493e76"} Dec 05 12:01:06 crc kubenswrapper[4728]: I1205 12:01:06.640445 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4a118b06bc9ff5b3a07777e268a0add59effa7320e95fa81b5b9c30aae493e76" Dec 05 12:01:06 crc kubenswrapper[4728]: I1205 12:01:06.640500 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29415601-tk5jd" Dec 05 12:01:12 crc kubenswrapper[4728]: I1205 12:01:12.351894 4728 scope.go:117] "RemoveContainer" containerID="7835518c087cce52627694f88ed4d8c74d1c85ad49c6e2245b200a55a9460994" Dec 05 12:01:12 crc kubenswrapper[4728]: E1205 12:01:12.352616 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:01:15 crc kubenswrapper[4728]: I1205 12:01:15.037732 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-flwxc"] Dec 05 12:01:15 crc kubenswrapper[4728]: E1205 12:01:15.038835 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ab6c45e-d05b-4ddc-92e0-5addedce425d" containerName="keystone-cron" Dec 05 12:01:15 crc kubenswrapper[4728]: I1205 12:01:15.038852 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ab6c45e-d05b-4ddc-92e0-5addedce425d" containerName="keystone-cron" Dec 05 12:01:15 crc kubenswrapper[4728]: I1205 12:01:15.039071 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ab6c45e-d05b-4ddc-92e0-5addedce425d" containerName="keystone-cron" Dec 05 12:01:15 crc kubenswrapper[4728]: I1205 12:01:15.040456 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-flwxc" Dec 05 12:01:15 crc kubenswrapper[4728]: I1205 12:01:15.054481 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-flwxc"] Dec 05 12:01:15 crc kubenswrapper[4728]: I1205 12:01:15.186272 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/98f7cdc8-e03c-4720-895c-1a711ace0cdd-utilities\") pod \"redhat-marketplace-flwxc\" (UID: \"98f7cdc8-e03c-4720-895c-1a711ace0cdd\") " pod="openshift-marketplace/redhat-marketplace-flwxc" Dec 05 12:01:15 crc kubenswrapper[4728]: I1205 12:01:15.186368 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/98f7cdc8-e03c-4720-895c-1a711ace0cdd-catalog-content\") pod \"redhat-marketplace-flwxc\" (UID: \"98f7cdc8-e03c-4720-895c-1a711ace0cdd\") " pod="openshift-marketplace/redhat-marketplace-flwxc" Dec 05 12:01:15 crc kubenswrapper[4728]: I1205 12:01:15.186451 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rjpwg\" (UniqueName: \"kubernetes.io/projected/98f7cdc8-e03c-4720-895c-1a711ace0cdd-kube-api-access-rjpwg\") pod \"redhat-marketplace-flwxc\" (UID: \"98f7cdc8-e03c-4720-895c-1a711ace0cdd\") " pod="openshift-marketplace/redhat-marketplace-flwxc" Dec 05 12:01:15 crc kubenswrapper[4728]: I1205 12:01:15.288071 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/98f7cdc8-e03c-4720-895c-1a711ace0cdd-utilities\") pod \"redhat-marketplace-flwxc\" (UID: \"98f7cdc8-e03c-4720-895c-1a711ace0cdd\") " pod="openshift-marketplace/redhat-marketplace-flwxc" Dec 05 12:01:15 crc kubenswrapper[4728]: I1205 
12:01:15.288142 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/98f7cdc8-e03c-4720-895c-1a711ace0cdd-catalog-content\") pod \"redhat-marketplace-flwxc\" (UID: \"98f7cdc8-e03c-4720-895c-1a711ace0cdd\") " pod="openshift-marketplace/redhat-marketplace-flwxc" Dec 05 12:01:15 crc kubenswrapper[4728]: I1205 12:01:15.288203 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rjpwg\" (UniqueName: \"kubernetes.io/projected/98f7cdc8-e03c-4720-895c-1a711ace0cdd-kube-api-access-rjpwg\") pod \"redhat-marketplace-flwxc\" (UID: \"98f7cdc8-e03c-4720-895c-1a711ace0cdd\") " pod="openshift-marketplace/redhat-marketplace-flwxc" Dec 05 12:01:15 crc kubenswrapper[4728]: I1205 12:01:15.288649 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/98f7cdc8-e03c-4720-895c-1a711ace0cdd-utilities\") pod \"redhat-marketplace-flwxc\" (UID: \"98f7cdc8-e03c-4720-895c-1a711ace0cdd\") " pod="openshift-marketplace/redhat-marketplace-flwxc" Dec 05 12:01:15 crc kubenswrapper[4728]: I1205 12:01:15.288684 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/98f7cdc8-e03c-4720-895c-1a711ace0cdd-catalog-content\") pod \"redhat-marketplace-flwxc\" (UID: \"98f7cdc8-e03c-4720-895c-1a711ace0cdd\") " pod="openshift-marketplace/redhat-marketplace-flwxc" Dec 05 12:01:15 crc kubenswrapper[4728]: I1205 12:01:15.311705 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rjpwg\" (UniqueName: \"kubernetes.io/projected/98f7cdc8-e03c-4720-895c-1a711ace0cdd-kube-api-access-rjpwg\") pod \"redhat-marketplace-flwxc\" (UID: \"98f7cdc8-e03c-4720-895c-1a711ace0cdd\") " pod="openshift-marketplace/redhat-marketplace-flwxc" Dec 05 12:01:15 crc kubenswrapper[4728]: I1205 12:01:15.376309 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-flwxc" Dec 05 12:01:15 crc kubenswrapper[4728]: I1205 12:01:15.875135 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-flwxc"] Dec 05 12:01:16 crc kubenswrapper[4728]: I1205 12:01:16.726021 4728 generic.go:334] "Generic (PLEG): container finished" podID="98f7cdc8-e03c-4720-895c-1a711ace0cdd" containerID="d4abd389bc40be4f3c75dced5e1a07824a2505a718cf465aa584beda3583efe5" exitCode=0 Dec 05 12:01:16 crc kubenswrapper[4728]: I1205 12:01:16.726136 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-flwxc" event={"ID":"98f7cdc8-e03c-4720-895c-1a711ace0cdd","Type":"ContainerDied","Data":"d4abd389bc40be4f3c75dced5e1a07824a2505a718cf465aa584beda3583efe5"} Dec 05 12:01:16 crc kubenswrapper[4728]: I1205 12:01:16.726339 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-flwxc" event={"ID":"98f7cdc8-e03c-4720-895c-1a711ace0cdd","Type":"ContainerStarted","Data":"51eb56456dcd430cdc79ba8c50db0e07352fdfdc39b26684aa3ed3dae1be5061"} Dec 05 12:01:17 crc kubenswrapper[4728]: I1205 12:01:17.740910 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-flwxc" event={"ID":"98f7cdc8-e03c-4720-895c-1a711ace0cdd","Type":"ContainerStarted","Data":"245d3c04f7c6ff257e2c39a9235d233c04199914c005fe5b4ceb2b6c1bf9ff9d"} Dec 05 12:01:18 crc kubenswrapper[4728]: I1205 12:01:18.752574 4728 generic.go:334] "Generic (PLEG): container finished" podID="98f7cdc8-e03c-4720-895c-1a711ace0cdd" containerID="245d3c04f7c6ff257e2c39a9235d233c04199914c005fe5b4ceb2b6c1bf9ff9d" exitCode=0 Dec 05 12:01:18 crc kubenswrapper[4728]: I1205 12:01:18.752658 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-flwxc" event={"ID":"98f7cdc8-e03c-4720-895c-1a711ace0cdd","Type":"ContainerDied","Data":"245d3c04f7c6ff257e2c39a9235d233c04199914c005fe5b4ceb2b6c1bf9ff9d"} Dec 05 12:01:19 crc kubenswrapper[4728]: I1205 12:01:19.763513 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-flwxc" event={"ID":"98f7cdc8-e03c-4720-895c-1a711ace0cdd","Type":"ContainerStarted","Data":"cb0c2470b1faf0cff14aa1c74a43bc142638d927dd43b9648817d04e2556be20"} Dec 05 12:01:19 crc kubenswrapper[4728]: I1205 12:01:19.792534 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-flwxc" podStartSLOduration=2.3943105129999998 podStartE2EDuration="4.792515024s" podCreationTimestamp="2025-12-05 12:01:15 +0000 UTC" firstStartedPulling="2025-12-05 12:01:16.729097641 +0000 UTC m=+3210.871220334" lastFinishedPulling="2025-12-05 12:01:19.127302142 +0000 UTC m=+3213.269424845" observedRunningTime="2025-12-05 12:01:19.78522116 +0000 UTC m=+3213.927343863" watchObservedRunningTime="2025-12-05 12:01:19.792515024 +0000 UTC m=+3213.934637717" Dec 05 12:01:23 crc kubenswrapper[4728]: I1205 12:01:23.353036 4728 scope.go:117] "RemoveContainer" containerID="7835518c087cce52627694f88ed4d8c74d1c85ad49c6e2245b200a55a9460994" Dec 05 12:01:23 crc kubenswrapper[4728]: E1205 12:01:23.353666 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
Dec 05 12:01:25 crc kubenswrapper[4728]: I1205 12:01:25.376922 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-flwxc"
Dec 05 12:01:25 crc kubenswrapper[4728]: I1205 12:01:25.377519 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-flwxc"
Dec 05 12:01:25 crc kubenswrapper[4728]: I1205 12:01:25.431866 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-flwxc"
Dec 05 12:01:25 crc kubenswrapper[4728]: I1205 12:01:25.860650 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-flwxc"
Dec 05 12:01:25 crc kubenswrapper[4728]: I1205 12:01:25.922502 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-flwxc"]
Dec 05 12:01:27 crc kubenswrapper[4728]: I1205 12:01:27.835674 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-flwxc" podUID="98f7cdc8-e03c-4720-895c-1a711ace0cdd" containerName="registry-server" containerID="cri-o://cb0c2470b1faf0cff14aa1c74a43bc142638d927dd43b9648817d04e2556be20" gracePeriod=2
Dec 05 12:01:28 crc kubenswrapper[4728]: I1205 12:01:28.858052 4728 generic.go:334] "Generic (PLEG): container finished" podID="98f7cdc8-e03c-4720-895c-1a711ace0cdd" containerID="cb0c2470b1faf0cff14aa1c74a43bc142638d927dd43b9648817d04e2556be20" exitCode=0
Dec 05 12:01:28 crc kubenswrapper[4728]: I1205 12:01:28.858100 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-flwxc" event={"ID":"98f7cdc8-e03c-4720-895c-1a711ace0cdd","Type":"ContainerDied","Data":"cb0c2470b1faf0cff14aa1c74a43bc142638d927dd43b9648817d04e2556be20"}
Dec 05 12:01:28 crc kubenswrapper[4728]: I1205 12:01:28.970649 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-flwxc"
Dec 05 12:01:29 crc kubenswrapper[4728]: I1205 12:01:29.096091 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/98f7cdc8-e03c-4720-895c-1a711ace0cdd-catalog-content\") pod \"98f7cdc8-e03c-4720-895c-1a711ace0cdd\" (UID: \"98f7cdc8-e03c-4720-895c-1a711ace0cdd\") "
Dec 05 12:01:29 crc kubenswrapper[4728]: I1205 12:01:29.096275 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rjpwg\" (UniqueName: \"kubernetes.io/projected/98f7cdc8-e03c-4720-895c-1a711ace0cdd-kube-api-access-rjpwg\") pod \"98f7cdc8-e03c-4720-895c-1a711ace0cdd\" (UID: \"98f7cdc8-e03c-4720-895c-1a711ace0cdd\") "
Dec 05 12:01:29 crc kubenswrapper[4728]: I1205 12:01:29.096408 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/98f7cdc8-e03c-4720-895c-1a711ace0cdd-utilities\") pod \"98f7cdc8-e03c-4720-895c-1a711ace0cdd\" (UID: \"98f7cdc8-e03c-4720-895c-1a711ace0cdd\") "
Dec 05 12:01:29 crc kubenswrapper[4728]: I1205 12:01:29.098286 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/98f7cdc8-e03c-4720-895c-1a711ace0cdd-utilities" (OuterVolumeSpecName: "utilities") pod "98f7cdc8-e03c-4720-895c-1a711ace0cdd" (UID: "98f7cdc8-e03c-4720-895c-1a711ace0cdd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 12:01:29 crc kubenswrapper[4728]: I1205 12:01:29.118180 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/98f7cdc8-e03c-4720-895c-1a711ace0cdd-kube-api-access-rjpwg" (OuterVolumeSpecName: "kube-api-access-rjpwg") pod "98f7cdc8-e03c-4720-895c-1a711ace0cdd" (UID: "98f7cdc8-e03c-4720-895c-1a711ace0cdd"). InnerVolumeSpecName "kube-api-access-rjpwg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:01:29 crc kubenswrapper[4728]: I1205 12:01:29.120175 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/98f7cdc8-e03c-4720-895c-1a711ace0cdd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "98f7cdc8-e03c-4720-895c-1a711ace0cdd" (UID: "98f7cdc8-e03c-4720-895c-1a711ace0cdd"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 12:01:29 crc kubenswrapper[4728]: I1205 12:01:29.199032 4728 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/98f7cdc8-e03c-4720-895c-1a711ace0cdd-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 12:01:29 crc kubenswrapper[4728]: I1205 12:01:29.199324 4728 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/98f7cdc8-e03c-4720-895c-1a711ace0cdd-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 12:01:29 crc kubenswrapper[4728]: I1205 12:01:29.199430 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rjpwg\" (UniqueName: \"kubernetes.io/projected/98f7cdc8-e03c-4720-895c-1a711ace0cdd-kube-api-access-rjpwg\") on node \"crc\" DevicePath \"\""
Dec 05 12:01:29 crc kubenswrapper[4728]: I1205 12:01:29.869945 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-flwxc" event={"ID":"98f7cdc8-e03c-4720-895c-1a711ace0cdd","Type":"ContainerDied","Data":"51eb56456dcd430cdc79ba8c50db0e07352fdfdc39b26684aa3ed3dae1be5061"}
Dec 05 12:01:29 crc kubenswrapper[4728]: I1205 12:01:29.869999 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-flwxc"
Dec 05 12:01:29 crc kubenswrapper[4728]: I1205 12:01:29.870262 4728 scope.go:117] "RemoveContainer" containerID="cb0c2470b1faf0cff14aa1c74a43bc142638d927dd43b9648817d04e2556be20"
Dec 05 12:01:29 crc kubenswrapper[4728]: I1205 12:01:29.905714 4728 scope.go:117] "RemoveContainer" containerID="245d3c04f7c6ff257e2c39a9235d233c04199914c005fe5b4ceb2b6c1bf9ff9d"
Dec 05 12:01:29 crc kubenswrapper[4728]: I1205 12:01:29.924709 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-flwxc"]
Dec 05 12:01:29 crc kubenswrapper[4728]: I1205 12:01:29.935849 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-flwxc"]
Dec 05 12:01:29 crc kubenswrapper[4728]: I1205 12:01:29.938746 4728 scope.go:117] "RemoveContainer" containerID="d4abd389bc40be4f3c75dced5e1a07824a2505a718cf465aa584beda3583efe5"
Dec 05 12:01:30 crc kubenswrapper[4728]: I1205 12:01:30.362634 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="98f7cdc8-e03c-4720-895c-1a711ace0cdd" path="/var/lib/kubelet/pods/98f7cdc8-e03c-4720-895c-1a711ace0cdd/volumes"
Dec 05 12:01:37 crc kubenswrapper[4728]: I1205 12:01:37.351562 4728 scope.go:117] "RemoveContainer" containerID="7835518c087cce52627694f88ed4d8c74d1c85ad49c6e2245b200a55a9460994"
Dec 05 12:01:37 crc kubenswrapper[4728]: E1205 12:01:37.352361 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d"
Dec 05 12:01:51 crc kubenswrapper[4728]: I1205 12:01:51.352685 4728 scope.go:117] "RemoveContainer" containerID="7835518c087cce52627694f88ed4d8c74d1c85ad49c6e2245b200a55a9460994"
Dec 05 12:01:51 crc kubenswrapper[4728]: E1205 12:01:51.353509 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d"
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:01:51 crc kubenswrapper[4728]: I1205 12:01:51.564665 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-wwzht"] Dec 05 12:01:51 crc kubenswrapper[4728]: E1205 12:01:51.565137 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98f7cdc8-e03c-4720-895c-1a711ace0cdd" containerName="extract-utilities" Dec 05 12:01:51 crc kubenswrapper[4728]: I1205 12:01:51.565156 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="98f7cdc8-e03c-4720-895c-1a711ace0cdd" containerName="extract-utilities" Dec 05 12:01:51 crc kubenswrapper[4728]: E1205 12:01:51.565174 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98f7cdc8-e03c-4720-895c-1a711ace0cdd" containerName="extract-content" Dec 05 12:01:51 crc kubenswrapper[4728]: I1205 12:01:51.565182 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="98f7cdc8-e03c-4720-895c-1a711ace0cdd" containerName="extract-content" Dec 05 12:01:51 crc kubenswrapper[4728]: E1205 12:01:51.565195 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98f7cdc8-e03c-4720-895c-1a711ace0cdd" containerName="registry-server" Dec 05 12:01:51 crc kubenswrapper[4728]: I1205 12:01:51.565203 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="98f7cdc8-e03c-4720-895c-1a711ace0cdd" containerName="registry-server" Dec 05 12:01:51 crc kubenswrapper[4728]: I1205 12:01:51.565459 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="98f7cdc8-e03c-4720-895c-1a711ace0cdd" containerName="registry-server" Dec 05 12:01:51 crc kubenswrapper[4728]: I1205 12:01:51.567044 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-wwzht" Dec 05 12:01:51 crc kubenswrapper[4728]: I1205 12:01:51.577660 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wwzht"] Dec 05 12:01:51 crc kubenswrapper[4728]: I1205 12:01:51.641401 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/921a69d6-107c-49eb-863b-bc9b9d2f65af-catalog-content\") pod \"certified-operators-wwzht\" (UID: \"921a69d6-107c-49eb-863b-bc9b9d2f65af\") " pod="openshift-marketplace/certified-operators-wwzht" Dec 05 12:01:51 crc kubenswrapper[4728]: I1205 12:01:51.641768 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/921a69d6-107c-49eb-863b-bc9b9d2f65af-utilities\") pod \"certified-operators-wwzht\" (UID: \"921a69d6-107c-49eb-863b-bc9b9d2f65af\") " pod="openshift-marketplace/certified-operators-wwzht" Dec 05 12:01:51 crc kubenswrapper[4728]: I1205 12:01:51.641912 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gtgh6\" (UniqueName: \"kubernetes.io/projected/921a69d6-107c-49eb-863b-bc9b9d2f65af-kube-api-access-gtgh6\") pod \"certified-operators-wwzht\" (UID: \"921a69d6-107c-49eb-863b-bc9b9d2f65af\") " pod="openshift-marketplace/certified-operators-wwzht" Dec 05 12:01:51 crc kubenswrapper[4728]: I1205 12:01:51.743929 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/921a69d6-107c-49eb-863b-bc9b9d2f65af-catalog-content\") pod \"certified-operators-wwzht\" (UID: \"921a69d6-107c-49eb-863b-bc9b9d2f65af\") " pod="openshift-marketplace/certified-operators-wwzht" Dec 05 12:01:51 crc kubenswrapper[4728]: I1205 12:01:51.744369 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/921a69d6-107c-49eb-863b-bc9b9d2f65af-utilities\") pod \"certified-operators-wwzht\" (UID: \"921a69d6-107c-49eb-863b-bc9b9d2f65af\") " pod="openshift-marketplace/certified-operators-wwzht" Dec 05 12:01:51 crc kubenswrapper[4728]: I1205 12:01:51.744469 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gtgh6\" (UniqueName: \"kubernetes.io/projected/921a69d6-107c-49eb-863b-bc9b9d2f65af-kube-api-access-gtgh6\") pod \"certified-operators-wwzht\" (UID: \"921a69d6-107c-49eb-863b-bc9b9d2f65af\") " pod="openshift-marketplace/certified-operators-wwzht" Dec 05 12:01:51 crc kubenswrapper[4728]: I1205 12:01:51.744479 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/921a69d6-107c-49eb-863b-bc9b9d2f65af-catalog-content\") pod \"certified-operators-wwzht\" (UID: \"921a69d6-107c-49eb-863b-bc9b9d2f65af\") " pod="openshift-marketplace/certified-operators-wwzht" Dec 05 12:01:51 crc kubenswrapper[4728]: I1205 12:01:51.745221 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/921a69d6-107c-49eb-863b-bc9b9d2f65af-utilities\") pod \"certified-operators-wwzht\" (UID: \"921a69d6-107c-49eb-863b-bc9b9d2f65af\") " pod="openshift-marketplace/certified-operators-wwzht" Dec 05 12:01:51 crc kubenswrapper[4728]: I1205 12:01:51.765430 4728 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-gtgh6\" (UniqueName: \"kubernetes.io/projected/921a69d6-107c-49eb-863b-bc9b9d2f65af-kube-api-access-gtgh6\") pod \"certified-operators-wwzht\" (UID: \"921a69d6-107c-49eb-863b-bc9b9d2f65af\") " pod="openshift-marketplace/certified-operators-wwzht" Dec 05 12:01:51 crc kubenswrapper[4728]: I1205 12:01:51.893067 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wwzht" Dec 05 12:01:52 crc kubenswrapper[4728]: I1205 12:01:52.440017 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wwzht"] Dec 05 12:01:53 crc kubenswrapper[4728]: I1205 12:01:53.140746 4728 generic.go:334] "Generic (PLEG): container finished" podID="921a69d6-107c-49eb-863b-bc9b9d2f65af" containerID="72098056f19cdb5532efad63e01a815d45045f90cf1afb029fe5187d0a3d5680" exitCode=0 Dec 05 12:01:53 crc kubenswrapper[4728]: I1205 12:01:53.140809 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wwzht" event={"ID":"921a69d6-107c-49eb-863b-bc9b9d2f65af","Type":"ContainerDied","Data":"72098056f19cdb5532efad63e01a815d45045f90cf1afb029fe5187d0a3d5680"} Dec 05 12:01:53 crc kubenswrapper[4728]: I1205 12:01:53.141049 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wwzht" event={"ID":"921a69d6-107c-49eb-863b-bc9b9d2f65af","Type":"ContainerStarted","Data":"fbee8800387d91aac8ca196b97da760c82afef29322c48b07e3ff485f827ed66"} Dec 05 12:01:55 crc kubenswrapper[4728]: I1205 12:01:55.161982 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wwzht" event={"ID":"921a69d6-107c-49eb-863b-bc9b9d2f65af","Type":"ContainerStarted","Data":"1b9bb2bc429e5b689205a016341031fbb2c3fa5a5a5208ffea04989f5663ad52"} Dec 05 12:01:57 crc kubenswrapper[4728]: I1205 12:01:57.189939 4728 generic.go:334] "Generic (PLEG): container finished" podID="921a69d6-107c-49eb-863b-bc9b9d2f65af" containerID="1b9bb2bc429e5b689205a016341031fbb2c3fa5a5a5208ffea04989f5663ad52" exitCode=0 Dec 05 12:01:57 crc kubenswrapper[4728]: I1205 12:01:57.190017 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wwzht" event={"ID":"921a69d6-107c-49eb-863b-bc9b9d2f65af","Type":"ContainerDied","Data":"1b9bb2bc429e5b689205a016341031fbb2c3fa5a5a5208ffea04989f5663ad52"} Dec 05 12:01:58 crc kubenswrapper[4728]: I1205 12:01:58.200223 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wwzht" event={"ID":"921a69d6-107c-49eb-863b-bc9b9d2f65af","Type":"ContainerStarted","Data":"e590e3f0878bb7fbb306687088fdc99300ba1a6002ff0b6340ddd789bd4f7c9d"} Dec 05 12:01:58 crc kubenswrapper[4728]: I1205 12:01:58.224520 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-wwzht" podStartSLOduration=2.735204887 podStartE2EDuration="7.224506227s" podCreationTimestamp="2025-12-05 12:01:51 +0000 UTC" firstStartedPulling="2025-12-05 12:01:53.142837844 +0000 UTC m=+3247.284960537" lastFinishedPulling="2025-12-05 12:01:57.632139184 +0000 UTC m=+3251.774261877" observedRunningTime="2025-12-05 12:01:58.222630057 +0000 UTC m=+3252.364752800" watchObservedRunningTime="2025-12-05 12:01:58.224506227 +0000 UTC m=+3252.366628920" Dec 05 12:02:01 crc kubenswrapper[4728]: I1205 12:02:01.894004 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/certified-operators-wwzht" Dec 05 12:02:01 crc kubenswrapper[4728]: I1205 12:02:01.894539 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-wwzht" Dec 05 12:02:01 crc kubenswrapper[4728]: I1205 12:02:01.953205 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-wwzht" Dec 05 12:02:02 crc kubenswrapper[4728]: I1205 12:02:02.287479 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-wwzht" Dec 05 12:02:02 crc kubenswrapper[4728]: I1205 12:02:02.345937 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-wwzht"] Dec 05 12:02:02 crc kubenswrapper[4728]: I1205 12:02:02.352068 4728 scope.go:117] "RemoveContainer" containerID="7835518c087cce52627694f88ed4d8c74d1c85ad49c6e2245b200a55a9460994" Dec 05 12:02:02 crc kubenswrapper[4728]: E1205 12:02:02.352409 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:02:04 crc kubenswrapper[4728]: I1205 12:02:04.250021 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-wwzht" podUID="921a69d6-107c-49eb-863b-bc9b9d2f65af" containerName="registry-server" containerID="cri-o://e590e3f0878bb7fbb306687088fdc99300ba1a6002ff0b6340ddd789bd4f7c9d" gracePeriod=2 Dec 05 12:02:04 crc kubenswrapper[4728]: I1205 12:02:04.966394 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wwzht" Dec 05 12:02:05 crc kubenswrapper[4728]: I1205 12:02:05.125114 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gtgh6\" (UniqueName: \"kubernetes.io/projected/921a69d6-107c-49eb-863b-bc9b9d2f65af-kube-api-access-gtgh6\") pod \"921a69d6-107c-49eb-863b-bc9b9d2f65af\" (UID: \"921a69d6-107c-49eb-863b-bc9b9d2f65af\") " Dec 05 12:02:05 crc kubenswrapper[4728]: I1205 12:02:05.125604 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/921a69d6-107c-49eb-863b-bc9b9d2f65af-utilities\") pod \"921a69d6-107c-49eb-863b-bc9b9d2f65af\" (UID: \"921a69d6-107c-49eb-863b-bc9b9d2f65af\") " Dec 05 12:02:05 crc kubenswrapper[4728]: I1205 12:02:05.125716 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/921a69d6-107c-49eb-863b-bc9b9d2f65af-catalog-content\") pod \"921a69d6-107c-49eb-863b-bc9b9d2f65af\" (UID: \"921a69d6-107c-49eb-863b-bc9b9d2f65af\") " Dec 05 12:02:05 crc kubenswrapper[4728]: I1205 12:02:05.126985 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/921a69d6-107c-49eb-863b-bc9b9d2f65af-utilities" (OuterVolumeSpecName: "utilities") pod "921a69d6-107c-49eb-863b-bc9b9d2f65af" (UID: "921a69d6-107c-49eb-863b-bc9b9d2f65af"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:02:05 crc kubenswrapper[4728]: I1205 12:02:05.131527 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/921a69d6-107c-49eb-863b-bc9b9d2f65af-kube-api-access-gtgh6" (OuterVolumeSpecName: "kube-api-access-gtgh6") pod "921a69d6-107c-49eb-863b-bc9b9d2f65af" (UID: "921a69d6-107c-49eb-863b-bc9b9d2f65af"). InnerVolumeSpecName "kube-api-access-gtgh6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:02:05 crc kubenswrapper[4728]: I1205 12:02:05.183942 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/921a69d6-107c-49eb-863b-bc9b9d2f65af-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "921a69d6-107c-49eb-863b-bc9b9d2f65af" (UID: "921a69d6-107c-49eb-863b-bc9b9d2f65af"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:02:05 crc kubenswrapper[4728]: I1205 12:02:05.227582 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gtgh6\" (UniqueName: \"kubernetes.io/projected/921a69d6-107c-49eb-863b-bc9b9d2f65af-kube-api-access-gtgh6\") on node \"crc\" DevicePath \"\"" Dec 05 12:02:05 crc kubenswrapper[4728]: I1205 12:02:05.227620 4728 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/921a69d6-107c-49eb-863b-bc9b9d2f65af-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 12:02:05 crc kubenswrapper[4728]: I1205 12:02:05.227631 4728 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/921a69d6-107c-49eb-863b-bc9b9d2f65af-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 12:02:05 crc kubenswrapper[4728]: I1205 12:02:05.260109 4728 generic.go:334] "Generic (PLEG): container finished" podID="921a69d6-107c-49eb-863b-bc9b9d2f65af" containerID="e590e3f0878bb7fbb306687088fdc99300ba1a6002ff0b6340ddd789bd4f7c9d" exitCode=0 Dec 05 12:02:05 crc kubenswrapper[4728]: I1205 12:02:05.260147 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wwzht" event={"ID":"921a69d6-107c-49eb-863b-bc9b9d2f65af","Type":"ContainerDied","Data":"e590e3f0878bb7fbb306687088fdc99300ba1a6002ff0b6340ddd789bd4f7c9d"} Dec 05 12:02:05 crc kubenswrapper[4728]: I1205 12:02:05.260176 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wwzht" event={"ID":"921a69d6-107c-49eb-863b-bc9b9d2f65af","Type":"ContainerDied","Data":"fbee8800387d91aac8ca196b97da760c82afef29322c48b07e3ff485f827ed66"} Dec 05 12:02:05 crc kubenswrapper[4728]: I1205 12:02:05.260193 4728 scope.go:117] "RemoveContainer" containerID="e590e3f0878bb7fbb306687088fdc99300ba1a6002ff0b6340ddd789bd4f7c9d" Dec 05 12:02:05 crc kubenswrapper[4728]: I1205 12:02:05.260221 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-wwzht" Dec 05 12:02:05 crc kubenswrapper[4728]: I1205 12:02:05.285258 4728 scope.go:117] "RemoveContainer" containerID="1b9bb2bc429e5b689205a016341031fbb2c3fa5a5a5208ffea04989f5663ad52" Dec 05 12:02:05 crc kubenswrapper[4728]: I1205 12:02:05.297020 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-wwzht"] Dec 05 12:02:05 crc kubenswrapper[4728]: I1205 12:02:05.310860 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-wwzht"] Dec 05 12:02:05 crc kubenswrapper[4728]: I1205 12:02:05.322364 4728 scope.go:117] "RemoveContainer" containerID="72098056f19cdb5532efad63e01a815d45045f90cf1afb029fe5187d0a3d5680" Dec 05 12:02:05 crc kubenswrapper[4728]: I1205 12:02:05.362959 4728 scope.go:117] "RemoveContainer" containerID="e590e3f0878bb7fbb306687088fdc99300ba1a6002ff0b6340ddd789bd4f7c9d" Dec 05 12:02:05 crc kubenswrapper[4728]: E1205 12:02:05.363499 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e590e3f0878bb7fbb306687088fdc99300ba1a6002ff0b6340ddd789bd4f7c9d\": container with ID starting with e590e3f0878bb7fbb306687088fdc99300ba1a6002ff0b6340ddd789bd4f7c9d not found: ID does not exist" containerID="e590e3f0878bb7fbb306687088fdc99300ba1a6002ff0b6340ddd789bd4f7c9d" Dec 05 12:02:05 crc kubenswrapper[4728]: I1205 12:02:05.363564 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e590e3f0878bb7fbb306687088fdc99300ba1a6002ff0b6340ddd789bd4f7c9d"} err="failed to get container status \"e590e3f0878bb7fbb306687088fdc99300ba1a6002ff0b6340ddd789bd4f7c9d\": rpc error: code = NotFound desc = could not find container \"e590e3f0878bb7fbb306687088fdc99300ba1a6002ff0b6340ddd789bd4f7c9d\": container with ID starting with e590e3f0878bb7fbb306687088fdc99300ba1a6002ff0b6340ddd789bd4f7c9d not found: ID does not exist" Dec 05 12:02:05 crc kubenswrapper[4728]: I1205 12:02:05.363602 4728 scope.go:117] "RemoveContainer" containerID="1b9bb2bc429e5b689205a016341031fbb2c3fa5a5a5208ffea04989f5663ad52" Dec 05 12:02:05 crc kubenswrapper[4728]: E1205 12:02:05.364142 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1b9bb2bc429e5b689205a016341031fbb2c3fa5a5a5208ffea04989f5663ad52\": container with ID starting with 1b9bb2bc429e5b689205a016341031fbb2c3fa5a5a5208ffea04989f5663ad52 not found: ID does not exist" containerID="1b9bb2bc429e5b689205a016341031fbb2c3fa5a5a5208ffea04989f5663ad52" Dec 05 12:02:05 crc kubenswrapper[4728]: I1205 12:02:05.364178 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1b9bb2bc429e5b689205a016341031fbb2c3fa5a5a5208ffea04989f5663ad52"} err="failed to get container status \"1b9bb2bc429e5b689205a016341031fbb2c3fa5a5a5208ffea04989f5663ad52\": rpc error: code = NotFound desc = could not find container \"1b9bb2bc429e5b689205a016341031fbb2c3fa5a5a5208ffea04989f5663ad52\": container with ID starting with 1b9bb2bc429e5b689205a016341031fbb2c3fa5a5a5208ffea04989f5663ad52 not found: ID does not exist" Dec 05 12:02:05 crc kubenswrapper[4728]: I1205 12:02:05.364206 4728 scope.go:117] "RemoveContainer" containerID="72098056f19cdb5532efad63e01a815d45045f90cf1afb029fe5187d0a3d5680" Dec 05 12:02:05 crc kubenswrapper[4728]: E1205 12:02:05.364456 4728 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"72098056f19cdb5532efad63e01a815d45045f90cf1afb029fe5187d0a3d5680\": container with ID starting with 72098056f19cdb5532efad63e01a815d45045f90cf1afb029fe5187d0a3d5680 not found: ID does not exist" containerID="72098056f19cdb5532efad63e01a815d45045f90cf1afb029fe5187d0a3d5680" Dec 05 12:02:05 crc kubenswrapper[4728]: I1205 12:02:05.364489 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"72098056f19cdb5532efad63e01a815d45045f90cf1afb029fe5187d0a3d5680"} err="failed to get container status \"72098056f19cdb5532efad63e01a815d45045f90cf1afb029fe5187d0a3d5680\": rpc error: code = NotFound desc = could not find container \"72098056f19cdb5532efad63e01a815d45045f90cf1afb029fe5187d0a3d5680\": container with ID starting with 72098056f19cdb5532efad63e01a815d45045f90cf1afb029fe5187d0a3d5680 not found: ID does not exist" Dec 05 12:02:06 crc kubenswrapper[4728]: I1205 12:02:06.362702 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="921a69d6-107c-49eb-863b-bc9b9d2f65af" path="/var/lib/kubelet/pods/921a69d6-107c-49eb-863b-bc9b9d2f65af/volumes" Dec 05 12:02:17 crc kubenswrapper[4728]: I1205 12:02:17.352993 4728 scope.go:117] "RemoveContainer" containerID="7835518c087cce52627694f88ed4d8c74d1c85ad49c6e2245b200a55a9460994" Dec 05 12:02:17 crc kubenswrapper[4728]: E1205 12:02:17.353896 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:02:29 crc kubenswrapper[4728]: I1205 12:02:29.353272 4728 scope.go:117] "RemoveContainer" containerID="7835518c087cce52627694f88ed4d8c74d1c85ad49c6e2245b200a55a9460994" Dec 05 12:02:29 crc kubenswrapper[4728]: E1205 12:02:29.354352 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:02:43 crc kubenswrapper[4728]: I1205 12:02:43.352362 4728 scope.go:117] "RemoveContainer" containerID="7835518c087cce52627694f88ed4d8c74d1c85ad49c6e2245b200a55a9460994" Dec 05 12:02:43 crc kubenswrapper[4728]: E1205 12:02:43.353110 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:02:57 crc kubenswrapper[4728]: I1205 12:02:57.352264 4728 scope.go:117] "RemoveContainer" containerID="7835518c087cce52627694f88ed4d8c74d1c85ad49c6e2245b200a55a9460994" Dec 05 12:02:57 crc kubenswrapper[4728]: E1205 12:02:57.353094 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:03:12 crc kubenswrapper[4728]: I1205 12:03:12.352441 4728 scope.go:117] "RemoveContainer" containerID="7835518c087cce52627694f88ed4d8c74d1c85ad49c6e2245b200a55a9460994" Dec 05 12:03:12 crc kubenswrapper[4728]: E1205 12:03:12.353333 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:03:23 crc kubenswrapper[4728]: I1205 12:03:23.353201 4728 scope.go:117] "RemoveContainer" containerID="7835518c087cce52627694f88ed4d8c74d1c85ad49c6e2245b200a55a9460994" Dec 05 12:03:23 crc kubenswrapper[4728]: E1205 12:03:23.354432 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:03:37 crc kubenswrapper[4728]: I1205 12:03:37.351889 4728 scope.go:117] "RemoveContainer" containerID="7835518c087cce52627694f88ed4d8c74d1c85ad49c6e2245b200a55a9460994" Dec 05 12:03:37 crc kubenswrapper[4728]: E1205 12:03:37.352587 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:03:49 crc kubenswrapper[4728]: I1205 12:03:49.354669 4728 scope.go:117] "RemoveContainer" containerID="7835518c087cce52627694f88ed4d8c74d1c85ad49c6e2245b200a55a9460994" Dec 05 12:03:49 crc kubenswrapper[4728]: E1205 12:03:49.355459 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:04:00 crc kubenswrapper[4728]: I1205 12:04:00.352653 4728 scope.go:117] "RemoveContainer" containerID="7835518c087cce52627694f88ed4d8c74d1c85ad49c6e2245b200a55a9460994" Dec 05 12:04:00 crc kubenswrapper[4728]: E1205 12:04:00.353280 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:04:13 crc kubenswrapper[4728]: I1205 12:04:13.353627 4728 scope.go:117] "RemoveContainer" containerID="7835518c087cce52627694f88ed4d8c74d1c85ad49c6e2245b200a55a9460994" Dec 05 12:04:13 crc kubenswrapper[4728]: E1205 12:04:13.354414 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:04:25 crc kubenswrapper[4728]: I1205 12:04:25.352144 4728 scope.go:117] "RemoveContainer" containerID="7835518c087cce52627694f88ed4d8c74d1c85ad49c6e2245b200a55a9460994" Dec 05 12:04:25 crc kubenswrapper[4728]: E1205 12:04:25.352937 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:04:39 crc kubenswrapper[4728]: I1205 12:04:39.352245 4728 scope.go:117] "RemoveContainer" containerID="7835518c087cce52627694f88ed4d8c74d1c85ad49c6e2245b200a55a9460994" Dec 05 12:04:40 crc kubenswrapper[4728]: I1205 12:04:40.738218 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" event={"ID":"95bfa60b-fcb6-4519-abc5-c25fea50921d","Type":"ContainerStarted","Data":"5038060e8639bee691a47af8962a19b3505521a582e70535eecf90485e24e4b5"} Dec 05 12:06:55 crc kubenswrapper[4728]: I1205 12:06:55.702462 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:06:55 crc kubenswrapper[4728]: I1205 12:06:55.703060 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:07:25 crc kubenswrapper[4728]: I1205 12:07:25.701960 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:07:25 crc kubenswrapper[4728]: I1205 12:07:25.702552 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: 
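
The prober entries above are a plain HTTP liveness probe: GET http://127.0.0.1:8798/health, where a refused connection counts as a failure, and after enough consecutive failures the kubelet kills the container (that happens at 12:07:55 below, with gracePeriod=600). A minimal Go sketch of such a probe loop; the 30s period is suggested by the spacing of the entries, and the failure threshold of 3 is an assumption, not read from the pod spec:

package main

import (
	"fmt"
	"net/http"
	"time"
)

// probeOnce performs one HTTP liveness check against the daemon's health
// endpoint from the log. A transport error such as "connection refused",
// or a non-2xx status, counts as a failure, roughly as the kubelet scores it.
func probeOnce(url string) bool {
	client := http.Client{Timeout: time.Second}
	resp, err := client.Get(url)
	if err != nil {
		fmt.Println("Probe failed:", err)
		return false
	}
	defer resp.Body.Close()
	return resp.StatusCode >= 200 && resp.StatusCode < 300
}

func main() {
	const url = "http://127.0.0.1:8798/health"
	failures := 0
	for range time.Tick(30 * time.Second) { // assumed periodSeconds=30
		if probeOnce(url) {
			failures = 0
			continue
		}
		failures++
		if failures >= 3 { // assumed failureThreshold=3
			fmt.Println("liveness threshold reached: container would be killed and restarted")
			return
		}
	}
}
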
connect: connection refused" Dec 05 12:07:43 crc kubenswrapper[4728]: I1205 12:07:43.082087 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-9pslj"] Dec 05 12:07:43 crc kubenswrapper[4728]: E1205 12:07:43.083010 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="921a69d6-107c-49eb-863b-bc9b9d2f65af" containerName="extract-content" Dec 05 12:07:43 crc kubenswrapper[4728]: I1205 12:07:43.083024 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="921a69d6-107c-49eb-863b-bc9b9d2f65af" containerName="extract-content" Dec 05 12:07:43 crc kubenswrapper[4728]: E1205 12:07:43.083047 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="921a69d6-107c-49eb-863b-bc9b9d2f65af" containerName="extract-utilities" Dec 05 12:07:43 crc kubenswrapper[4728]: I1205 12:07:43.083053 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="921a69d6-107c-49eb-863b-bc9b9d2f65af" containerName="extract-utilities" Dec 05 12:07:43 crc kubenswrapper[4728]: E1205 12:07:43.083070 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="921a69d6-107c-49eb-863b-bc9b9d2f65af" containerName="registry-server" Dec 05 12:07:43 crc kubenswrapper[4728]: I1205 12:07:43.083077 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="921a69d6-107c-49eb-863b-bc9b9d2f65af" containerName="registry-server" Dec 05 12:07:43 crc kubenswrapper[4728]: I1205 12:07:43.083311 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="921a69d6-107c-49eb-863b-bc9b9d2f65af" containerName="registry-server" Dec 05 12:07:43 crc kubenswrapper[4728]: I1205 12:07:43.084636 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9pslj" Dec 05 12:07:43 crc kubenswrapper[4728]: I1205 12:07:43.110751 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9pslj"] Dec 05 12:07:43 crc kubenswrapper[4728]: I1205 12:07:43.173896 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2fac3248-f264-45d6-a46c-48990ac25762-catalog-content\") pod \"redhat-operators-9pslj\" (UID: \"2fac3248-f264-45d6-a46c-48990ac25762\") " pod="openshift-marketplace/redhat-operators-9pslj" Dec 05 12:07:43 crc kubenswrapper[4728]: I1205 12:07:43.173957 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v8plp\" (UniqueName: \"kubernetes.io/projected/2fac3248-f264-45d6-a46c-48990ac25762-kube-api-access-v8plp\") pod \"redhat-operators-9pslj\" (UID: \"2fac3248-f264-45d6-a46c-48990ac25762\") " pod="openshift-marketplace/redhat-operators-9pslj" Dec 05 12:07:43 crc kubenswrapper[4728]: I1205 12:07:43.174176 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2fac3248-f264-45d6-a46c-48990ac25762-utilities\") pod \"redhat-operators-9pslj\" (UID: \"2fac3248-f264-45d6-a46c-48990ac25762\") " pod="openshift-marketplace/redhat-operators-9pslj" Dec 05 12:07:43 crc kubenswrapper[4728]: I1205 12:07:43.276440 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2fac3248-f264-45d6-a46c-48990ac25762-utilities\") pod \"redhat-operators-9pslj\" (UID: \"2fac3248-f264-45d6-a46c-48990ac25762\") " 
pod="openshift-marketplace/redhat-operators-9pslj" Dec 05 12:07:43 crc kubenswrapper[4728]: I1205 12:07:43.276551 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2fac3248-f264-45d6-a46c-48990ac25762-catalog-content\") pod \"redhat-operators-9pslj\" (UID: \"2fac3248-f264-45d6-a46c-48990ac25762\") " pod="openshift-marketplace/redhat-operators-9pslj" Dec 05 12:07:43 crc kubenswrapper[4728]: I1205 12:07:43.276604 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v8plp\" (UniqueName: \"kubernetes.io/projected/2fac3248-f264-45d6-a46c-48990ac25762-kube-api-access-v8plp\") pod \"redhat-operators-9pslj\" (UID: \"2fac3248-f264-45d6-a46c-48990ac25762\") " pod="openshift-marketplace/redhat-operators-9pslj" Dec 05 12:07:43 crc kubenswrapper[4728]: I1205 12:07:43.277159 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2fac3248-f264-45d6-a46c-48990ac25762-utilities\") pod \"redhat-operators-9pslj\" (UID: \"2fac3248-f264-45d6-a46c-48990ac25762\") " pod="openshift-marketplace/redhat-operators-9pslj" Dec 05 12:07:43 crc kubenswrapper[4728]: I1205 12:07:43.277729 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2fac3248-f264-45d6-a46c-48990ac25762-catalog-content\") pod \"redhat-operators-9pslj\" (UID: \"2fac3248-f264-45d6-a46c-48990ac25762\") " pod="openshift-marketplace/redhat-operators-9pslj" Dec 05 12:07:43 crc kubenswrapper[4728]: I1205 12:07:43.298315 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v8plp\" (UniqueName: \"kubernetes.io/projected/2fac3248-f264-45d6-a46c-48990ac25762-kube-api-access-v8plp\") pod \"redhat-operators-9pslj\" (UID: \"2fac3248-f264-45d6-a46c-48990ac25762\") " pod="openshift-marketplace/redhat-operators-9pslj" Dec 05 12:07:43 crc kubenswrapper[4728]: I1205 12:07:43.407995 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-9pslj" Dec 05 12:07:43 crc kubenswrapper[4728]: I1205 12:07:43.914138 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9pslj"] Dec 05 12:07:44 crc kubenswrapper[4728]: I1205 12:07:44.522837 4728 generic.go:334] "Generic (PLEG): container finished" podID="2fac3248-f264-45d6-a46c-48990ac25762" containerID="122a975c105d8be186dc1c1206288a2f235afd03597deedf5390337b734fa7b8" exitCode=0 Dec 05 12:07:44 crc kubenswrapper[4728]: I1205 12:07:44.522883 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9pslj" event={"ID":"2fac3248-f264-45d6-a46c-48990ac25762","Type":"ContainerDied","Data":"122a975c105d8be186dc1c1206288a2f235afd03597deedf5390337b734fa7b8"} Dec 05 12:07:44 crc kubenswrapper[4728]: I1205 12:07:44.523120 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9pslj" event={"ID":"2fac3248-f264-45d6-a46c-48990ac25762","Type":"ContainerStarted","Data":"793d8679dbcc22d7162ae52e76297cc9f23ebf7875936e32fd8a998489af50df"} Dec 05 12:07:44 crc kubenswrapper[4728]: I1205 12:07:44.525255 4728 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 12:07:45 crc kubenswrapper[4728]: I1205 12:07:45.540413 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9pslj" event={"ID":"2fac3248-f264-45d6-a46c-48990ac25762","Type":"ContainerStarted","Data":"22a1256f2e5872d1917d7c100cf93c02eca6d3938e07ed14c8a263dccb89bf48"} Dec 05 12:07:49 crc kubenswrapper[4728]: I1205 12:07:49.583098 4728 generic.go:334] "Generic (PLEG): container finished" podID="2fac3248-f264-45d6-a46c-48990ac25762" containerID="22a1256f2e5872d1917d7c100cf93c02eca6d3938e07ed14c8a263dccb89bf48" exitCode=0 Dec 05 12:07:49 crc kubenswrapper[4728]: I1205 12:07:49.583630 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9pslj" event={"ID":"2fac3248-f264-45d6-a46c-48990ac25762","Type":"ContainerDied","Data":"22a1256f2e5872d1917d7c100cf93c02eca6d3938e07ed14c8a263dccb89bf48"} Dec 05 12:07:50 crc kubenswrapper[4728]: I1205 12:07:50.595423 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9pslj" event={"ID":"2fac3248-f264-45d6-a46c-48990ac25762","Type":"ContainerStarted","Data":"0b3a05b8a09f15632913ff1ef72ea6750ab0780516edcc8806569d220ba848ca"} Dec 05 12:07:50 crc kubenswrapper[4728]: I1205 12:07:50.637477 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-9pslj" podStartSLOduration=2.163870885 podStartE2EDuration="7.637456955s" podCreationTimestamp="2025-12-05 12:07:43 +0000 UTC" firstStartedPulling="2025-12-05 12:07:44.524987913 +0000 UTC m=+3598.667110606" lastFinishedPulling="2025-12-05 12:07:49.998573983 +0000 UTC m=+3604.140696676" observedRunningTime="2025-12-05 12:07:50.621044878 +0000 UTC m=+3604.763167581" watchObservedRunningTime="2025-12-05 12:07:50.637456955 +0000 UTC m=+3604.779579648" Dec 05 12:07:53 crc kubenswrapper[4728]: I1205 12:07:53.409194 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-9pslj" Dec 05 12:07:53 crc kubenswrapper[4728]: I1205 12:07:53.409925 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-9pslj" Dec 05 12:07:54 crc 
kubenswrapper[4728]: I1205 12:07:54.472105 4728 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-9pslj" podUID="2fac3248-f264-45d6-a46c-48990ac25762" containerName="registry-server" probeResult="failure" output=< Dec 05 12:07:54 crc kubenswrapper[4728]: timeout: failed to connect service ":50051" within 1s Dec 05 12:07:54 crc kubenswrapper[4728]: > Dec 05 12:07:55 crc kubenswrapper[4728]: I1205 12:07:55.702180 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:07:55 crc kubenswrapper[4728]: I1205 12:07:55.702250 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:07:55 crc kubenswrapper[4728]: I1205 12:07:55.702293 4728 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" Dec 05 12:07:55 crc kubenswrapper[4728]: I1205 12:07:55.703020 4728 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5038060e8639bee691a47af8962a19b3505521a582e70535eecf90485e24e4b5"} pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 12:07:55 crc kubenswrapper[4728]: I1205 12:07:55.703064 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" containerID="cri-o://5038060e8639bee691a47af8962a19b3505521a582e70535eecf90485e24e4b5" gracePeriod=600 Dec 05 12:07:56 crc kubenswrapper[4728]: I1205 12:07:56.652231 4728 generic.go:334] "Generic (PLEG): container finished" podID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerID="5038060e8639bee691a47af8962a19b3505521a582e70535eecf90485e24e4b5" exitCode=0 Dec 05 12:07:56 crc kubenswrapper[4728]: I1205 12:07:56.652337 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" event={"ID":"95bfa60b-fcb6-4519-abc5-c25fea50921d","Type":"ContainerDied","Data":"5038060e8639bee691a47af8962a19b3505521a582e70535eecf90485e24e4b5"} Dec 05 12:07:56 crc kubenswrapper[4728]: I1205 12:07:56.652786 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" event={"ID":"95bfa60b-fcb6-4519-abc5-c25fea50921d","Type":"ContainerStarted","Data":"c681aae2a7c18de59995f0595b656c92ba330a744e4c4ab020e99b92f9b5d623"} Dec 05 12:07:56 crc kubenswrapper[4728]: I1205 12:07:56.652828 4728 scope.go:117] "RemoveContainer" containerID="7835518c087cce52627694f88ed4d8c74d1c85ad49c6e2245b200a55a9460994" Dec 05 12:08:03 crc kubenswrapper[4728]: I1205 12:08:03.458618 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-9pslj" Dec 05 12:08:03 crc kubenswrapper[4728]: I1205 12:08:03.514323 4728 
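
The startup-probe failure for redhat-operators-9pslj above simply means the registry server was not yet listening on :50051 within the probe's 1s budget; once it was, the probe flipped to "started" and readiness to "ready". A bare-bones Go stand-in for that check as a TCP dial with the same 1s timeout; the real probe appears to be a gRPC health check (the "timeout: failed to connect service" wording comes from it), so this sketch only tests reachability:

package main

import (
	"fmt"
	"net"
	"os"
	"time"
)

func main() {
	// Try to reach the registry-server's gRPC port within the 1s budget
	// seen in the log. This only tests TCP reachability, not gRPC health.
	conn, err := net.DialTimeout("tcp", "127.0.0.1:50051", time.Second)
	if err != nil {
		fmt.Printf("timeout: failed to connect service %q within 1s\n", ":50051")
		os.Exit(1) // a non-zero exit is what marks the probe attempt failed
	}
	conn.Close()
	fmt.Println("service reachable; probe reports started")
}
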
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-9pslj" Dec 05 12:08:03 crc kubenswrapper[4728]: I1205 12:08:03.697735 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9pslj"] Dec 05 12:08:04 crc kubenswrapper[4728]: I1205 12:08:04.740589 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-9pslj" podUID="2fac3248-f264-45d6-a46c-48990ac25762" containerName="registry-server" containerID="cri-o://0b3a05b8a09f15632913ff1ef72ea6750ab0780516edcc8806569d220ba848ca" gracePeriod=2 Dec 05 12:08:05 crc kubenswrapper[4728]: I1205 12:08:05.701498 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9pslj" Dec 05 12:08:05 crc kubenswrapper[4728]: I1205 12:08:05.751360 4728 generic.go:334] "Generic (PLEG): container finished" podID="2fac3248-f264-45d6-a46c-48990ac25762" containerID="0b3a05b8a09f15632913ff1ef72ea6750ab0780516edcc8806569d220ba848ca" exitCode=0 Dec 05 12:08:05 crc kubenswrapper[4728]: I1205 12:08:05.751403 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9pslj" event={"ID":"2fac3248-f264-45d6-a46c-48990ac25762","Type":"ContainerDied","Data":"0b3a05b8a09f15632913ff1ef72ea6750ab0780516edcc8806569d220ba848ca"} Dec 05 12:08:05 crc kubenswrapper[4728]: I1205 12:08:05.751428 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9pslj" event={"ID":"2fac3248-f264-45d6-a46c-48990ac25762","Type":"ContainerDied","Data":"793d8679dbcc22d7162ae52e76297cc9f23ebf7875936e32fd8a998489af50df"} Dec 05 12:08:05 crc kubenswrapper[4728]: I1205 12:08:05.751445 4728 scope.go:117] "RemoveContainer" containerID="0b3a05b8a09f15632913ff1ef72ea6750ab0780516edcc8806569d220ba848ca" Dec 05 12:08:05 crc kubenswrapper[4728]: I1205 12:08:05.751451 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9pslj" Dec 05 12:08:05 crc kubenswrapper[4728]: I1205 12:08:05.770456 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v8plp\" (UniqueName: \"kubernetes.io/projected/2fac3248-f264-45d6-a46c-48990ac25762-kube-api-access-v8plp\") pod \"2fac3248-f264-45d6-a46c-48990ac25762\" (UID: \"2fac3248-f264-45d6-a46c-48990ac25762\") " Dec 05 12:08:05 crc kubenswrapper[4728]: I1205 12:08:05.770504 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2fac3248-f264-45d6-a46c-48990ac25762-catalog-content\") pod \"2fac3248-f264-45d6-a46c-48990ac25762\" (UID: \"2fac3248-f264-45d6-a46c-48990ac25762\") " Dec 05 12:08:05 crc kubenswrapper[4728]: I1205 12:08:05.770674 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2fac3248-f264-45d6-a46c-48990ac25762-utilities\") pod \"2fac3248-f264-45d6-a46c-48990ac25762\" (UID: \"2fac3248-f264-45d6-a46c-48990ac25762\") " Dec 05 12:08:05 crc kubenswrapper[4728]: I1205 12:08:05.771688 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2fac3248-f264-45d6-a46c-48990ac25762-utilities" (OuterVolumeSpecName: "utilities") pod "2fac3248-f264-45d6-a46c-48990ac25762" (UID: "2fac3248-f264-45d6-a46c-48990ac25762"). 
InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:08:05 crc kubenswrapper[4728]: I1205 12:08:05.779223 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2fac3248-f264-45d6-a46c-48990ac25762-kube-api-access-v8plp" (OuterVolumeSpecName: "kube-api-access-v8plp") pod "2fac3248-f264-45d6-a46c-48990ac25762" (UID: "2fac3248-f264-45d6-a46c-48990ac25762"). InnerVolumeSpecName "kube-api-access-v8plp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:08:05 crc kubenswrapper[4728]: I1205 12:08:05.793518 4728 scope.go:117] "RemoveContainer" containerID="22a1256f2e5872d1917d7c100cf93c02eca6d3938e07ed14c8a263dccb89bf48" Dec 05 12:08:05 crc kubenswrapper[4728]: I1205 12:08:05.843227 4728 scope.go:117] "RemoveContainer" containerID="122a975c105d8be186dc1c1206288a2f235afd03597deedf5390337b734fa7b8" Dec 05 12:08:05 crc kubenswrapper[4728]: I1205 12:08:05.873760 4728 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2fac3248-f264-45d6-a46c-48990ac25762-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 12:08:05 crc kubenswrapper[4728]: I1205 12:08:05.873834 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v8plp\" (UniqueName: \"kubernetes.io/projected/2fac3248-f264-45d6-a46c-48990ac25762-kube-api-access-v8plp\") on node \"crc\" DevicePath \"\"" Dec 05 12:08:05 crc kubenswrapper[4728]: I1205 12:08:05.899366 4728 scope.go:117] "RemoveContainer" containerID="0b3a05b8a09f15632913ff1ef72ea6750ab0780516edcc8806569d220ba848ca" Dec 05 12:08:05 crc kubenswrapper[4728]: E1205 12:08:05.899882 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0b3a05b8a09f15632913ff1ef72ea6750ab0780516edcc8806569d220ba848ca\": container with ID starting with 0b3a05b8a09f15632913ff1ef72ea6750ab0780516edcc8806569d220ba848ca not found: ID does not exist" containerID="0b3a05b8a09f15632913ff1ef72ea6750ab0780516edcc8806569d220ba848ca" Dec 05 12:08:05 crc kubenswrapper[4728]: I1205 12:08:05.899913 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b3a05b8a09f15632913ff1ef72ea6750ab0780516edcc8806569d220ba848ca"} err="failed to get container status \"0b3a05b8a09f15632913ff1ef72ea6750ab0780516edcc8806569d220ba848ca\": rpc error: code = NotFound desc = could not find container \"0b3a05b8a09f15632913ff1ef72ea6750ab0780516edcc8806569d220ba848ca\": container with ID starting with 0b3a05b8a09f15632913ff1ef72ea6750ab0780516edcc8806569d220ba848ca not found: ID does not exist" Dec 05 12:08:05 crc kubenswrapper[4728]: I1205 12:08:05.899947 4728 scope.go:117] "RemoveContainer" containerID="22a1256f2e5872d1917d7c100cf93c02eca6d3938e07ed14c8a263dccb89bf48" Dec 05 12:08:05 crc kubenswrapper[4728]: E1205 12:08:05.900201 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"22a1256f2e5872d1917d7c100cf93c02eca6d3938e07ed14c8a263dccb89bf48\": container with ID starting with 22a1256f2e5872d1917d7c100cf93c02eca6d3938e07ed14c8a263dccb89bf48 not found: ID does not exist" containerID="22a1256f2e5872d1917d7c100cf93c02eca6d3938e07ed14c8a263dccb89bf48" Dec 05 12:08:05 crc kubenswrapper[4728]: I1205 12:08:05.900220 4728 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"22a1256f2e5872d1917d7c100cf93c02eca6d3938e07ed14c8a263dccb89bf48"} err="failed to get container status \"22a1256f2e5872d1917d7c100cf93c02eca6d3938e07ed14c8a263dccb89bf48\": rpc error: code = NotFound desc = could not find container \"22a1256f2e5872d1917d7c100cf93c02eca6d3938e07ed14c8a263dccb89bf48\": container with ID starting with 22a1256f2e5872d1917d7c100cf93c02eca6d3938e07ed14c8a263dccb89bf48 not found: ID does not exist" Dec 05 12:08:05 crc kubenswrapper[4728]: I1205 12:08:05.900232 4728 scope.go:117] "RemoveContainer" containerID="122a975c105d8be186dc1c1206288a2f235afd03597deedf5390337b734fa7b8" Dec 05 12:08:05 crc kubenswrapper[4728]: E1205 12:08:05.900710 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"122a975c105d8be186dc1c1206288a2f235afd03597deedf5390337b734fa7b8\": container with ID starting with 122a975c105d8be186dc1c1206288a2f235afd03597deedf5390337b734fa7b8 not found: ID does not exist" containerID="122a975c105d8be186dc1c1206288a2f235afd03597deedf5390337b734fa7b8" Dec 05 12:08:05 crc kubenswrapper[4728]: I1205 12:08:05.900770 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"122a975c105d8be186dc1c1206288a2f235afd03597deedf5390337b734fa7b8"} err="failed to get container status \"122a975c105d8be186dc1c1206288a2f235afd03597deedf5390337b734fa7b8\": rpc error: code = NotFound desc = could not find container \"122a975c105d8be186dc1c1206288a2f235afd03597deedf5390337b734fa7b8\": container with ID starting with 122a975c105d8be186dc1c1206288a2f235afd03597deedf5390337b734fa7b8 not found: ID does not exist" Dec 05 12:08:05 crc kubenswrapper[4728]: I1205 12:08:05.911907 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2fac3248-f264-45d6-a46c-48990ac25762-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2fac3248-f264-45d6-a46c-48990ac25762" (UID: "2fac3248-f264-45d6-a46c-48990ac25762"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:08:05 crc kubenswrapper[4728]: I1205 12:08:05.975532 4728 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2fac3248-f264-45d6-a46c-48990ac25762-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 12:08:06 crc kubenswrapper[4728]: I1205 12:08:06.131869 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9pslj"] Dec 05 12:08:06 crc kubenswrapper[4728]: I1205 12:08:06.141121 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-9pslj"] Dec 05 12:08:06 crc kubenswrapper[4728]: I1205 12:08:06.362041 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2fac3248-f264-45d6-a46c-48990ac25762" path="/var/lib/kubelet/pods/2fac3248-f264-45d6-a46c-48990ac25762/volumes" Dec 05 12:10:05 crc kubenswrapper[4728]: I1205 12:10:05.913356 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-29tbp"] Dec 05 12:10:05 crc kubenswrapper[4728]: E1205 12:10:05.914124 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fac3248-f264-45d6-a46c-48990ac25762" containerName="registry-server" Dec 05 12:10:05 crc kubenswrapper[4728]: I1205 12:10:05.914137 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fac3248-f264-45d6-a46c-48990ac25762" containerName="registry-server" Dec 05 12:10:05 crc kubenswrapper[4728]: E1205 12:10:05.914149 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fac3248-f264-45d6-a46c-48990ac25762" containerName="extract-utilities" Dec 05 12:10:05 crc kubenswrapper[4728]: I1205 12:10:05.914155 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fac3248-f264-45d6-a46c-48990ac25762" containerName="extract-utilities" Dec 05 12:10:05 crc kubenswrapper[4728]: E1205 12:10:05.914188 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fac3248-f264-45d6-a46c-48990ac25762" containerName="extract-content" Dec 05 12:10:05 crc kubenswrapper[4728]: I1205 12:10:05.914193 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fac3248-f264-45d6-a46c-48990ac25762" containerName="extract-content" Dec 05 12:10:05 crc kubenswrapper[4728]: I1205 12:10:05.914370 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="2fac3248-f264-45d6-a46c-48990ac25762" containerName="registry-server" Dec 05 12:10:05 crc kubenswrapper[4728]: I1205 12:10:05.915630 4728 util.go:30] "No sandbox for pod can be found. 
Dec 05 12:10:05 crc kubenswrapper[4728]: I1205 12:10:05.942491 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-29tbp"]
Dec 05 12:10:06 crc kubenswrapper[4728]: I1205 12:10:06.098320 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4k57c\" (UniqueName: \"kubernetes.io/projected/a3ae6b5f-bd59-48d6-8d3d-6dba590f3af2-kube-api-access-4k57c\") pod \"community-operators-29tbp\" (UID: \"a3ae6b5f-bd59-48d6-8d3d-6dba590f3af2\") " pod="openshift-marketplace/community-operators-29tbp"
Dec 05 12:10:06 crc kubenswrapper[4728]: I1205 12:10:06.099114 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a3ae6b5f-bd59-48d6-8d3d-6dba590f3af2-catalog-content\") pod \"community-operators-29tbp\" (UID: \"a3ae6b5f-bd59-48d6-8d3d-6dba590f3af2\") " pod="openshift-marketplace/community-operators-29tbp"
Dec 05 12:10:06 crc kubenswrapper[4728]: I1205 12:10:06.099196 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a3ae6b5f-bd59-48d6-8d3d-6dba590f3af2-utilities\") pod \"community-operators-29tbp\" (UID: \"a3ae6b5f-bd59-48d6-8d3d-6dba590f3af2\") " pod="openshift-marketplace/community-operators-29tbp"
Dec 05 12:10:06 crc kubenswrapper[4728]: I1205 12:10:06.201193 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a3ae6b5f-bd59-48d6-8d3d-6dba590f3af2-catalog-content\") pod \"community-operators-29tbp\" (UID: \"a3ae6b5f-bd59-48d6-8d3d-6dba590f3af2\") " pod="openshift-marketplace/community-operators-29tbp"
Dec 05 12:10:06 crc kubenswrapper[4728]: I1205 12:10:06.201480 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a3ae6b5f-bd59-48d6-8d3d-6dba590f3af2-utilities\") pod \"community-operators-29tbp\" (UID: \"a3ae6b5f-bd59-48d6-8d3d-6dba590f3af2\") " pod="openshift-marketplace/community-operators-29tbp"
Dec 05 12:10:06 crc kubenswrapper[4728]: I1205 12:10:06.201683 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4k57c\" (UniqueName: \"kubernetes.io/projected/a3ae6b5f-bd59-48d6-8d3d-6dba590f3af2-kube-api-access-4k57c\") pod \"community-operators-29tbp\" (UID: \"a3ae6b5f-bd59-48d6-8d3d-6dba590f3af2\") " pod="openshift-marketplace/community-operators-29tbp"
Dec 05 12:10:06 crc kubenswrapper[4728]: I1205 12:10:06.202284 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a3ae6b5f-bd59-48d6-8d3d-6dba590f3af2-catalog-content\") pod \"community-operators-29tbp\" (UID: \"a3ae6b5f-bd59-48d6-8d3d-6dba590f3af2\") " pod="openshift-marketplace/community-operators-29tbp"
Dec 05 12:10:06 crc kubenswrapper[4728]: I1205 12:10:06.202560 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a3ae6b5f-bd59-48d6-8d3d-6dba590f3af2-utilities\") pod \"community-operators-29tbp\" (UID: \"a3ae6b5f-bd59-48d6-8d3d-6dba590f3af2\") " pod="openshift-marketplace/community-operators-29tbp"
Dec 05 12:10:06 crc kubenswrapper[4728]: I1205 12:10:06.220735 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4k57c\" (UniqueName: \"kubernetes.io/projected/a3ae6b5f-bd59-48d6-8d3d-6dba590f3af2-kube-api-access-4k57c\") pod \"community-operators-29tbp\" (UID: \"a3ae6b5f-bd59-48d6-8d3d-6dba590f3af2\") " pod="openshift-marketplace/community-operators-29tbp"
Dec 05 12:10:06 crc kubenswrapper[4728]: I1205 12:10:06.236508 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-29tbp"
Dec 05 12:10:06 crc kubenswrapper[4728]: I1205 12:10:06.846123 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-29tbp"]
Dec 05 12:10:07 crc kubenswrapper[4728]: I1205 12:10:07.828579 4728 generic.go:334] "Generic (PLEG): container finished" podID="a3ae6b5f-bd59-48d6-8d3d-6dba590f3af2" containerID="4c59e5071092bc4719c445e71558ca1e013b5790aaa10713b0dc96874c546aa8" exitCode=0
Dec 05 12:10:07 crc kubenswrapper[4728]: I1205 12:10:07.828690 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-29tbp" event={"ID":"a3ae6b5f-bd59-48d6-8d3d-6dba590f3af2","Type":"ContainerDied","Data":"4c59e5071092bc4719c445e71558ca1e013b5790aaa10713b0dc96874c546aa8"}
Dec 05 12:10:07 crc kubenswrapper[4728]: I1205 12:10:07.829058 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-29tbp" event={"ID":"a3ae6b5f-bd59-48d6-8d3d-6dba590f3af2","Type":"ContainerStarted","Data":"da618472b98b5a2c80526e5ffa2b2480d1e4bb6bc898118e111db23216a54db0"}
Dec 05 12:10:08 crc kubenswrapper[4728]: I1205 12:10:08.840070 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-29tbp" event={"ID":"a3ae6b5f-bd59-48d6-8d3d-6dba590f3af2","Type":"ContainerStarted","Data":"08c49345276ca51b89876931af1d5ae73812eaf7728b713ec6198418f70602ab"}
Dec 05 12:10:10 crc kubenswrapper[4728]: I1205 12:10:10.858656 4728 generic.go:334] "Generic (PLEG): container finished" podID="a3ae6b5f-bd59-48d6-8d3d-6dba590f3af2" containerID="08c49345276ca51b89876931af1d5ae73812eaf7728b713ec6198418f70602ab" exitCode=0
Dec 05 12:10:10 crc kubenswrapper[4728]: I1205 12:10:10.858726 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-29tbp" event={"ID":"a3ae6b5f-bd59-48d6-8d3d-6dba590f3af2","Type":"ContainerDied","Data":"08c49345276ca51b89876931af1d5ae73812eaf7728b713ec6198418f70602ab"}
Dec 05 12:10:11 crc kubenswrapper[4728]: I1205 12:10:11.870580 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-29tbp" event={"ID":"a3ae6b5f-bd59-48d6-8d3d-6dba590f3af2","Type":"ContainerStarted","Data":"b28ee744d1e1bde84334135641ec2fc733338415ad113256210fa6199e48f4f3"}
Dec 05 12:10:11 crc kubenswrapper[4728]: I1205 12:10:11.895524 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-29tbp" podStartSLOduration=3.422436358 podStartE2EDuration="6.89550394s" podCreationTimestamp="2025-12-05 12:10:05 +0000 UTC" firstStartedPulling="2025-12-05 12:10:07.831130865 +0000 UTC m=+3741.973253558" lastFinishedPulling="2025-12-05 12:10:11.304198447 +0000 UTC m=+3745.446321140" observedRunningTime="2025-12-05 12:10:11.887541968 +0000 UTC m=+3746.029664681" watchObservedRunningTime="2025-12-05 12:10:11.89550394 +0000 UTC m=+3746.037626633"
Dec 05 12:10:16 crc kubenswrapper[4728]: I1205 12:10:16.237459 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-29tbp"
Dec 05 12:10:16 crc kubenswrapper[4728]: I1205 12:10:16.238022 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-29tbp"
Dec 05 12:10:16 crc kubenswrapper[4728]: I1205 12:10:16.290872 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-29tbp"
Dec 05 12:10:16 crc kubenswrapper[4728]: I1205 12:10:16.988197 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-29tbp"
Dec 05 12:10:17 crc kubenswrapper[4728]: I1205 12:10:17.042959 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-29tbp"]
Dec 05 12:10:18 crc kubenswrapper[4728]: I1205 12:10:18.948911 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-29tbp" podUID="a3ae6b5f-bd59-48d6-8d3d-6dba590f3af2" containerName="registry-server" containerID="cri-o://b28ee744d1e1bde84334135641ec2fc733338415ad113256210fa6199e48f4f3" gracePeriod=2
Dec 05 12:10:19 crc kubenswrapper[4728]: I1205 12:10:19.960598 4728 generic.go:334] "Generic (PLEG): container finished" podID="a3ae6b5f-bd59-48d6-8d3d-6dba590f3af2" containerID="b28ee744d1e1bde84334135641ec2fc733338415ad113256210fa6199e48f4f3" exitCode=0
Dec 05 12:10:19 crc kubenswrapper[4728]: I1205 12:10:19.960676 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-29tbp" event={"ID":"a3ae6b5f-bd59-48d6-8d3d-6dba590f3af2","Type":"ContainerDied","Data":"b28ee744d1e1bde84334135641ec2fc733338415ad113256210fa6199e48f4f3"}
Dec 05 12:10:19 crc kubenswrapper[4728]: I1205 12:10:19.961144 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-29tbp" event={"ID":"a3ae6b5f-bd59-48d6-8d3d-6dba590f3af2","Type":"ContainerDied","Data":"da618472b98b5a2c80526e5ffa2b2480d1e4bb6bc898118e111db23216a54db0"}
Dec 05 12:10:19 crc kubenswrapper[4728]: I1205 12:10:19.961158 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="da618472b98b5a2c80526e5ffa2b2480d1e4bb6bc898118e111db23216a54db0"
Dec 05 12:10:20 crc kubenswrapper[4728]: I1205 12:10:20.007175 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-29tbp"
Dec 05 12:10:20 crc kubenswrapper[4728]: I1205 12:10:20.131429 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a3ae6b5f-bd59-48d6-8d3d-6dba590f3af2-catalog-content\") pod \"a3ae6b5f-bd59-48d6-8d3d-6dba590f3af2\" (UID: \"a3ae6b5f-bd59-48d6-8d3d-6dba590f3af2\") "
Dec 05 12:10:20 crc kubenswrapper[4728]: I1205 12:10:20.131562 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4k57c\" (UniqueName: \"kubernetes.io/projected/a3ae6b5f-bd59-48d6-8d3d-6dba590f3af2-kube-api-access-4k57c\") pod \"a3ae6b5f-bd59-48d6-8d3d-6dba590f3af2\" (UID: \"a3ae6b5f-bd59-48d6-8d3d-6dba590f3af2\") "
Dec 05 12:10:20 crc kubenswrapper[4728]: I1205 12:10:20.132543 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a3ae6b5f-bd59-48d6-8d3d-6dba590f3af2-utilities\") pod \"a3ae6b5f-bd59-48d6-8d3d-6dba590f3af2\" (UID: \"a3ae6b5f-bd59-48d6-8d3d-6dba590f3af2\") "
Dec 05 12:10:20 crc kubenswrapper[4728]: I1205 12:10:20.133298 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a3ae6b5f-bd59-48d6-8d3d-6dba590f3af2-utilities" (OuterVolumeSpecName: "utilities") pod "a3ae6b5f-bd59-48d6-8d3d-6dba590f3af2" (UID: "a3ae6b5f-bd59-48d6-8d3d-6dba590f3af2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 12:10:20 crc kubenswrapper[4728]: I1205 12:10:20.145590 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a3ae6b5f-bd59-48d6-8d3d-6dba590f3af2-kube-api-access-4k57c" (OuterVolumeSpecName: "kube-api-access-4k57c") pod "a3ae6b5f-bd59-48d6-8d3d-6dba590f3af2" (UID: "a3ae6b5f-bd59-48d6-8d3d-6dba590f3af2"). InnerVolumeSpecName "kube-api-access-4k57c". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:10:20 crc kubenswrapper[4728]: I1205 12:10:20.195388 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a3ae6b5f-bd59-48d6-8d3d-6dba590f3af2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a3ae6b5f-bd59-48d6-8d3d-6dba590f3af2" (UID: "a3ae6b5f-bd59-48d6-8d3d-6dba590f3af2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 12:10:20 crc kubenswrapper[4728]: I1205 12:10:20.235512 4728 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a3ae6b5f-bd59-48d6-8d3d-6dba590f3af2-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 12:10:20 crc kubenswrapper[4728]: I1205 12:10:20.235551 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4k57c\" (UniqueName: \"kubernetes.io/projected/a3ae6b5f-bd59-48d6-8d3d-6dba590f3af2-kube-api-access-4k57c\") on node \"crc\" DevicePath \"\""
Dec 05 12:10:20 crc kubenswrapper[4728]: I1205 12:10:20.235564 4728 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a3ae6b5f-bd59-48d6-8d3d-6dba590f3af2-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 12:10:20 crc kubenswrapper[4728]: I1205 12:10:20.970426 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-29tbp"
Dec 05 12:10:21 crc kubenswrapper[4728]: I1205 12:10:21.000474 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-29tbp"]
Dec 05 12:10:21 crc kubenswrapper[4728]: I1205 12:10:21.011832 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-29tbp"]
Dec 05 12:10:22 crc kubenswrapper[4728]: I1205 12:10:22.362880 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a3ae6b5f-bd59-48d6-8d3d-6dba590f3af2" path="/var/lib/kubelet/pods/a3ae6b5f-bd59-48d6-8d3d-6dba590f3af2/volumes"
Dec 05 12:10:25 crc kubenswrapper[4728]: I1205 12:10:25.701910 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 12:10:25 crc kubenswrapper[4728]: I1205 12:10:25.703600 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 12:10:55 crc kubenswrapper[4728]: I1205 12:10:55.701876 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 12:10:55 crc kubenswrapper[4728]: I1205 12:10:55.702590 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 12:11:25 crc kubenswrapper[4728]: I1205 12:11:25.702586 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 12:11:25 crc kubenswrapper[4728]: I1205 12:11:25.703213 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 12:11:25 crc kubenswrapper[4728]: I1205 12:11:25.703263 4728 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp"
Dec 05 12:11:25 crc kubenswrapper[4728]: I1205 12:11:25.704054 4728 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c681aae2a7c18de59995f0595b656c92ba330a744e4c4ab020e99b92f9b5d623"} pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
restarted" Dec 05 12:11:25 crc kubenswrapper[4728]: I1205 12:11:25.704102 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" containerID="cri-o://c681aae2a7c18de59995f0595b656c92ba330a744e4c4ab020e99b92f9b5d623" gracePeriod=600 Dec 05 12:11:25 crc kubenswrapper[4728]: E1205 12:11:25.831613 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:11:26 crc kubenswrapper[4728]: I1205 12:11:26.592668 4728 generic.go:334] "Generic (PLEG): container finished" podID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerID="c681aae2a7c18de59995f0595b656c92ba330a744e4c4ab020e99b92f9b5d623" exitCode=0 Dec 05 12:11:26 crc kubenswrapper[4728]: I1205 12:11:26.592751 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" event={"ID":"95bfa60b-fcb6-4519-abc5-c25fea50921d","Type":"ContainerDied","Data":"c681aae2a7c18de59995f0595b656c92ba330a744e4c4ab020e99b92f9b5d623"} Dec 05 12:11:26 crc kubenswrapper[4728]: I1205 12:11:26.593266 4728 scope.go:117] "RemoveContainer" containerID="5038060e8639bee691a47af8962a19b3505521a582e70535eecf90485e24e4b5" Dec 05 12:11:26 crc kubenswrapper[4728]: I1205 12:11:26.594320 4728 scope.go:117] "RemoveContainer" containerID="c681aae2a7c18de59995f0595b656c92ba330a744e4c4ab020e99b92f9b5d623" Dec 05 12:11:26 crc kubenswrapper[4728]: E1205 12:11:26.597244 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:11:37 crc kubenswrapper[4728]: I1205 12:11:37.352156 4728 scope.go:117] "RemoveContainer" containerID="c681aae2a7c18de59995f0595b656c92ba330a744e4c4ab020e99b92f9b5d623" Dec 05 12:11:37 crc kubenswrapper[4728]: E1205 12:11:37.352953 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:11:51 crc kubenswrapper[4728]: I1205 12:11:51.352785 4728 scope.go:117] "RemoveContainer" containerID="c681aae2a7c18de59995f0595b656c92ba330a744e4c4ab020e99b92f9b5d623" Dec 05 12:11:51 crc kubenswrapper[4728]: E1205 12:11:51.353705 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:12:06 crc kubenswrapper[4728]: I1205 12:12:06.359619 4728 scope.go:117] "RemoveContainer" containerID="c681aae2a7c18de59995f0595b656c92ba330a744e4c4ab020e99b92f9b5d623" Dec 05 12:12:06 crc kubenswrapper[4728]: E1205 12:12:06.360468 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:12:18 crc kubenswrapper[4728]: I1205 12:12:18.352144 4728 scope.go:117] "RemoveContainer" containerID="c681aae2a7c18de59995f0595b656c92ba330a744e4c4ab020e99b92f9b5d623" Dec 05 12:12:18 crc kubenswrapper[4728]: E1205 12:12:18.352883 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:12:29 crc kubenswrapper[4728]: I1205 12:12:29.352824 4728 scope.go:117] "RemoveContainer" containerID="c681aae2a7c18de59995f0595b656c92ba330a744e4c4ab020e99b92f9b5d623" Dec 05 12:12:29 crc kubenswrapper[4728]: E1205 12:12:29.354568 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:12:38 crc kubenswrapper[4728]: I1205 12:12:38.142385 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-2dhc9"] Dec 05 12:12:38 crc kubenswrapper[4728]: E1205 12:12:38.144135 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3ae6b5f-bd59-48d6-8d3d-6dba590f3af2" containerName="extract-content" Dec 05 12:12:38 crc kubenswrapper[4728]: I1205 12:12:38.144153 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3ae6b5f-bd59-48d6-8d3d-6dba590f3af2" containerName="extract-content" Dec 05 12:12:38 crc kubenswrapper[4728]: E1205 12:12:38.144369 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3ae6b5f-bd59-48d6-8d3d-6dba590f3af2" containerName="registry-server" Dec 05 12:12:38 crc kubenswrapper[4728]: I1205 12:12:38.144375 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3ae6b5f-bd59-48d6-8d3d-6dba590f3af2" containerName="registry-server" Dec 05 12:12:38 crc kubenswrapper[4728]: E1205 12:12:38.144405 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3ae6b5f-bd59-48d6-8d3d-6dba590f3af2" containerName="extract-utilities" Dec 05 12:12:38 crc kubenswrapper[4728]: I1205 12:12:38.144414 4728 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="a3ae6b5f-bd59-48d6-8d3d-6dba590f3af2" containerName="extract-utilities" Dec 05 12:12:38 crc kubenswrapper[4728]: I1205 12:12:38.144669 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="a3ae6b5f-bd59-48d6-8d3d-6dba590f3af2" containerName="registry-server" Dec 05 12:12:38 crc kubenswrapper[4728]: I1205 12:12:38.146521 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2dhc9" Dec 05 12:12:38 crc kubenswrapper[4728]: I1205 12:12:38.154418 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2dhc9"] Dec 05 12:12:38 crc kubenswrapper[4728]: I1205 12:12:38.256132 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f334617-fe1b-4147-a688-cfcc0b853e39-utilities\") pod \"certified-operators-2dhc9\" (UID: \"5f334617-fe1b-4147-a688-cfcc0b853e39\") " pod="openshift-marketplace/certified-operators-2dhc9" Dec 05 12:12:38 crc kubenswrapper[4728]: I1205 12:12:38.256440 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cnnqh\" (UniqueName: \"kubernetes.io/projected/5f334617-fe1b-4147-a688-cfcc0b853e39-kube-api-access-cnnqh\") pod \"certified-operators-2dhc9\" (UID: \"5f334617-fe1b-4147-a688-cfcc0b853e39\") " pod="openshift-marketplace/certified-operators-2dhc9" Dec 05 12:12:38 crc kubenswrapper[4728]: I1205 12:12:38.256778 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f334617-fe1b-4147-a688-cfcc0b853e39-catalog-content\") pod \"certified-operators-2dhc9\" (UID: \"5f334617-fe1b-4147-a688-cfcc0b853e39\") " pod="openshift-marketplace/certified-operators-2dhc9" Dec 05 12:12:38 crc kubenswrapper[4728]: I1205 12:12:38.358696 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f334617-fe1b-4147-a688-cfcc0b853e39-catalog-content\") pod \"certified-operators-2dhc9\" (UID: \"5f334617-fe1b-4147-a688-cfcc0b853e39\") " pod="openshift-marketplace/certified-operators-2dhc9" Dec 05 12:12:38 crc kubenswrapper[4728]: I1205 12:12:38.360134 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f334617-fe1b-4147-a688-cfcc0b853e39-utilities\") pod \"certified-operators-2dhc9\" (UID: \"5f334617-fe1b-4147-a688-cfcc0b853e39\") " pod="openshift-marketplace/certified-operators-2dhc9" Dec 05 12:12:38 crc kubenswrapper[4728]: I1205 12:12:38.360241 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cnnqh\" (UniqueName: \"kubernetes.io/projected/5f334617-fe1b-4147-a688-cfcc0b853e39-kube-api-access-cnnqh\") pod \"certified-operators-2dhc9\" (UID: \"5f334617-fe1b-4147-a688-cfcc0b853e39\") " pod="openshift-marketplace/certified-operators-2dhc9" Dec 05 12:12:38 crc kubenswrapper[4728]: I1205 12:12:38.359242 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f334617-fe1b-4147-a688-cfcc0b853e39-catalog-content\") pod \"certified-operators-2dhc9\" (UID: \"5f334617-fe1b-4147-a688-cfcc0b853e39\") " pod="openshift-marketplace/certified-operators-2dhc9" Dec 05 12:12:38 crc kubenswrapper[4728]: I1205 12:12:38.361009 4728 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f334617-fe1b-4147-a688-cfcc0b853e39-utilities\") pod \"certified-operators-2dhc9\" (UID: \"5f334617-fe1b-4147-a688-cfcc0b853e39\") " pod="openshift-marketplace/certified-operators-2dhc9" Dec 05 12:12:38 crc kubenswrapper[4728]: I1205 12:12:38.403903 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cnnqh\" (UniqueName: \"kubernetes.io/projected/5f334617-fe1b-4147-a688-cfcc0b853e39-kube-api-access-cnnqh\") pod \"certified-operators-2dhc9\" (UID: \"5f334617-fe1b-4147-a688-cfcc0b853e39\") " pod="openshift-marketplace/certified-operators-2dhc9" Dec 05 12:12:38 crc kubenswrapper[4728]: I1205 12:12:38.491627 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2dhc9" Dec 05 12:12:39 crc kubenswrapper[4728]: I1205 12:12:39.094036 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2dhc9"] Dec 05 12:12:39 crc kubenswrapper[4728]: I1205 12:12:39.303964 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2dhc9" event={"ID":"5f334617-fe1b-4147-a688-cfcc0b853e39","Type":"ContainerStarted","Data":"1503e73b6068a0b4cd3ba98dc68e28dfaa3002506bd8325628087bba86038c5b"} Dec 05 12:12:40 crc kubenswrapper[4728]: I1205 12:12:40.314616 4728 generic.go:334] "Generic (PLEG): container finished" podID="5f334617-fe1b-4147-a688-cfcc0b853e39" containerID="c8512e2e42f533c9b7a6b744329ea5a70a458835206207e43e489dbbfc359190" exitCode=0 Dec 05 12:12:40 crc kubenswrapper[4728]: I1205 12:12:40.314728 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2dhc9" event={"ID":"5f334617-fe1b-4147-a688-cfcc0b853e39","Type":"ContainerDied","Data":"c8512e2e42f533c9b7a6b744329ea5a70a458835206207e43e489dbbfc359190"} Dec 05 12:12:42 crc kubenswrapper[4728]: I1205 12:12:42.336222 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2dhc9" event={"ID":"5f334617-fe1b-4147-a688-cfcc0b853e39","Type":"ContainerStarted","Data":"d663ec893b4cc089e5229281b1ab3b29bb6dbeb8a4a5bfc5eb04a7950aac49c1"} Dec 05 12:12:43 crc kubenswrapper[4728]: I1205 12:12:43.346128 4728 generic.go:334] "Generic (PLEG): container finished" podID="5f334617-fe1b-4147-a688-cfcc0b853e39" containerID="d663ec893b4cc089e5229281b1ab3b29bb6dbeb8a4a5bfc5eb04a7950aac49c1" exitCode=0 Dec 05 12:12:43 crc kubenswrapper[4728]: I1205 12:12:43.346214 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2dhc9" event={"ID":"5f334617-fe1b-4147-a688-cfcc0b853e39","Type":"ContainerDied","Data":"d663ec893b4cc089e5229281b1ab3b29bb6dbeb8a4a5bfc5eb04a7950aac49c1"} Dec 05 12:12:44 crc kubenswrapper[4728]: I1205 12:12:44.352045 4728 scope.go:117] "RemoveContainer" containerID="c681aae2a7c18de59995f0595b656c92ba330a744e4c4ab020e99b92f9b5d623" Dec 05 12:12:44 crc kubenswrapper[4728]: E1205 12:12:44.352718 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 
Dec 05 12:12:44 crc kubenswrapper[4728]: I1205 12:12:44.367944 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2dhc9" event={"ID":"5f334617-fe1b-4147-a688-cfcc0b853e39","Type":"ContainerStarted","Data":"4e63e1bb3550e9789a0f0722a1e7d8d6739cfe086c6900ca6d627698c1248f16"}
Dec 05 12:12:44 crc kubenswrapper[4728]: I1205 12:12:44.389024 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-2dhc9" podStartSLOduration=2.9728490560000003 podStartE2EDuration="6.389004864s" podCreationTimestamp="2025-12-05 12:12:38 +0000 UTC" firstStartedPulling="2025-12-05 12:12:40.318270648 +0000 UTC m=+3894.460393351" lastFinishedPulling="2025-12-05 12:12:43.734426456 +0000 UTC m=+3897.876549159" observedRunningTime="2025-12-05 12:12:44.379681395 +0000 UTC m=+3898.521804108" watchObservedRunningTime="2025-12-05 12:12:44.389004864 +0000 UTC m=+3898.531127557"
Dec 05 12:12:48 crc kubenswrapper[4728]: I1205 12:12:48.492631 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-2dhc9"
Dec 05 12:12:48 crc kubenswrapper[4728]: I1205 12:12:48.493133 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-2dhc9"
Dec 05 12:12:48 crc kubenswrapper[4728]: I1205 12:12:48.547085 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-2dhc9"
Dec 05 12:12:49 crc kubenswrapper[4728]: I1205 12:12:49.491617 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-2dhc9"
Dec 05 12:12:49 crc kubenswrapper[4728]: I1205 12:12:49.553927 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2dhc9"]
Dec 05 12:12:51 crc kubenswrapper[4728]: I1205 12:12:51.451077 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-2dhc9" podUID="5f334617-fe1b-4147-a688-cfcc0b853e39" containerName="registry-server" containerID="cri-o://4e63e1bb3550e9789a0f0722a1e7d8d6739cfe086c6900ca6d627698c1248f16" gracePeriod=2
Dec 05 12:12:52 crc kubenswrapper[4728]: I1205 12:12:52.239495 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2dhc9"
Dec 05 12:12:52 crc kubenswrapper[4728]: I1205 12:12:52.336357 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cnnqh\" (UniqueName: \"kubernetes.io/projected/5f334617-fe1b-4147-a688-cfcc0b853e39-kube-api-access-cnnqh\") pod \"5f334617-fe1b-4147-a688-cfcc0b853e39\" (UID: \"5f334617-fe1b-4147-a688-cfcc0b853e39\") "
Dec 05 12:12:52 crc kubenswrapper[4728]: I1205 12:12:52.336403 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f334617-fe1b-4147-a688-cfcc0b853e39-utilities\") pod \"5f334617-fe1b-4147-a688-cfcc0b853e39\" (UID: \"5f334617-fe1b-4147-a688-cfcc0b853e39\") "
Dec 05 12:12:52 crc kubenswrapper[4728]: I1205 12:12:52.336464 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f334617-fe1b-4147-a688-cfcc0b853e39-catalog-content\") pod \"5f334617-fe1b-4147-a688-cfcc0b853e39\" (UID: \"5f334617-fe1b-4147-a688-cfcc0b853e39\") "
Dec 05 12:12:52 crc kubenswrapper[4728]: I1205 12:12:52.337515 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5f334617-fe1b-4147-a688-cfcc0b853e39-utilities" (OuterVolumeSpecName: "utilities") pod "5f334617-fe1b-4147-a688-cfcc0b853e39" (UID: "5f334617-fe1b-4147-a688-cfcc0b853e39"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 12:12:52 crc kubenswrapper[4728]: I1205 12:12:52.342676 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f334617-fe1b-4147-a688-cfcc0b853e39-kube-api-access-cnnqh" (OuterVolumeSpecName: "kube-api-access-cnnqh") pod "5f334617-fe1b-4147-a688-cfcc0b853e39" (UID: "5f334617-fe1b-4147-a688-cfcc0b853e39"). InnerVolumeSpecName "kube-api-access-cnnqh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:12:52 crc kubenswrapper[4728]: I1205 12:12:52.390270 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5f334617-fe1b-4147-a688-cfcc0b853e39-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5f334617-fe1b-4147-a688-cfcc0b853e39" (UID: "5f334617-fe1b-4147-a688-cfcc0b853e39"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 12:12:52 crc kubenswrapper[4728]: I1205 12:12:52.439225 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cnnqh\" (UniqueName: \"kubernetes.io/projected/5f334617-fe1b-4147-a688-cfcc0b853e39-kube-api-access-cnnqh\") on node \"crc\" DevicePath \"\""
Dec 05 12:12:52 crc kubenswrapper[4728]: I1205 12:12:52.439264 4728 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f334617-fe1b-4147-a688-cfcc0b853e39-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 12:12:52 crc kubenswrapper[4728]: I1205 12:12:52.439276 4728 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f334617-fe1b-4147-a688-cfcc0b853e39-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 12:12:52 crc kubenswrapper[4728]: I1205 12:12:52.462990 4728 generic.go:334] "Generic (PLEG): container finished" podID="5f334617-fe1b-4147-a688-cfcc0b853e39" containerID="4e63e1bb3550e9789a0f0722a1e7d8d6739cfe086c6900ca6d627698c1248f16" exitCode=0
Dec 05 12:12:52 crc kubenswrapper[4728]: I1205 12:12:52.463054 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2dhc9"
Dec 05 12:12:52 crc kubenswrapper[4728]: I1205 12:12:52.463068 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2dhc9" event={"ID":"5f334617-fe1b-4147-a688-cfcc0b853e39","Type":"ContainerDied","Data":"4e63e1bb3550e9789a0f0722a1e7d8d6739cfe086c6900ca6d627698c1248f16"}
Dec 05 12:12:52 crc kubenswrapper[4728]: I1205 12:12:52.463323 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2dhc9" event={"ID":"5f334617-fe1b-4147-a688-cfcc0b853e39","Type":"ContainerDied","Data":"1503e73b6068a0b4cd3ba98dc68e28dfaa3002506bd8325628087bba86038c5b"}
Dec 05 12:12:52 crc kubenswrapper[4728]: I1205 12:12:52.463347 4728 scope.go:117] "RemoveContainer" containerID="4e63e1bb3550e9789a0f0722a1e7d8d6739cfe086c6900ca6d627698c1248f16"
Dec 05 12:12:52 crc kubenswrapper[4728]: I1205 12:12:52.499489 4728 scope.go:117] "RemoveContainer" containerID="d663ec893b4cc089e5229281b1ab3b29bb6dbeb8a4a5bfc5eb04a7950aac49c1"
Dec 05 12:12:52 crc kubenswrapper[4728]: I1205 12:12:52.506950 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2dhc9"]
Dec 05 12:12:52 crc kubenswrapper[4728]: I1205 12:12:52.517680 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-2dhc9"]
Dec 05 12:12:52 crc kubenswrapper[4728]: I1205 12:12:52.526146 4728 scope.go:117] "RemoveContainer" containerID="c8512e2e42f533c9b7a6b744329ea5a70a458835206207e43e489dbbfc359190"
Dec 05 12:12:52 crc kubenswrapper[4728]: I1205 12:12:52.570025 4728 scope.go:117] "RemoveContainer" containerID="4e63e1bb3550e9789a0f0722a1e7d8d6739cfe086c6900ca6d627698c1248f16"
Dec 05 12:12:52 crc kubenswrapper[4728]: E1205 12:12:52.570695 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4e63e1bb3550e9789a0f0722a1e7d8d6739cfe086c6900ca6d627698c1248f16\": container with ID starting with 4e63e1bb3550e9789a0f0722a1e7d8d6739cfe086c6900ca6d627698c1248f16 not found: ID does not exist" containerID="4e63e1bb3550e9789a0f0722a1e7d8d6739cfe086c6900ca6d627698c1248f16"
Dec 05 12:12:52 crc kubenswrapper[4728]: I1205 12:12:52.570744 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4e63e1bb3550e9789a0f0722a1e7d8d6739cfe086c6900ca6d627698c1248f16"} err="failed to get container status \"4e63e1bb3550e9789a0f0722a1e7d8d6739cfe086c6900ca6d627698c1248f16\": rpc error: code = NotFound desc = could not find container \"4e63e1bb3550e9789a0f0722a1e7d8d6739cfe086c6900ca6d627698c1248f16\": container with ID starting with 4e63e1bb3550e9789a0f0722a1e7d8d6739cfe086c6900ca6d627698c1248f16 not found: ID does not exist"
Dec 05 12:12:52 crc kubenswrapper[4728]: I1205 12:12:52.570786 4728 scope.go:117] "RemoveContainer" containerID="d663ec893b4cc089e5229281b1ab3b29bb6dbeb8a4a5bfc5eb04a7950aac49c1"
Dec 05 12:12:52 crc kubenswrapper[4728]: E1205 12:12:52.571246 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d663ec893b4cc089e5229281b1ab3b29bb6dbeb8a4a5bfc5eb04a7950aac49c1\": container with ID starting with d663ec893b4cc089e5229281b1ab3b29bb6dbeb8a4a5bfc5eb04a7950aac49c1 not found: ID does not exist" containerID="d663ec893b4cc089e5229281b1ab3b29bb6dbeb8a4a5bfc5eb04a7950aac49c1"
Dec 05 12:12:52 crc kubenswrapper[4728]: I1205 12:12:52.571304 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d663ec893b4cc089e5229281b1ab3b29bb6dbeb8a4a5bfc5eb04a7950aac49c1"} err="failed to get container status \"d663ec893b4cc089e5229281b1ab3b29bb6dbeb8a4a5bfc5eb04a7950aac49c1\": rpc error: code = NotFound desc = could not find container \"d663ec893b4cc089e5229281b1ab3b29bb6dbeb8a4a5bfc5eb04a7950aac49c1\": container with ID starting with d663ec893b4cc089e5229281b1ab3b29bb6dbeb8a4a5bfc5eb04a7950aac49c1 not found: ID does not exist"
Dec 05 12:12:52 crc kubenswrapper[4728]: I1205 12:12:52.571337 4728 scope.go:117] "RemoveContainer" containerID="c8512e2e42f533c9b7a6b744329ea5a70a458835206207e43e489dbbfc359190"
Dec 05 12:12:52 crc kubenswrapper[4728]: E1205 12:12:52.571725 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c8512e2e42f533c9b7a6b744329ea5a70a458835206207e43e489dbbfc359190\": container with ID starting with c8512e2e42f533c9b7a6b744329ea5a70a458835206207e43e489dbbfc359190 not found: ID does not exist" containerID="c8512e2e42f533c9b7a6b744329ea5a70a458835206207e43e489dbbfc359190"
Dec 05 12:12:52 crc kubenswrapper[4728]: I1205 12:12:52.571752 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c8512e2e42f533c9b7a6b744329ea5a70a458835206207e43e489dbbfc359190"} err="failed to get container status \"c8512e2e42f533c9b7a6b744329ea5a70a458835206207e43e489dbbfc359190\": rpc error: code = NotFound desc = could not find container \"c8512e2e42f533c9b7a6b744329ea5a70a458835206207e43e489dbbfc359190\": container with ID starting with c8512e2e42f533c9b7a6b744329ea5a70a458835206207e43e489dbbfc359190 not found: ID does not exist"
Dec 05 12:12:54 crc kubenswrapper[4728]: I1205 12:12:54.360778 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5f334617-fe1b-4147-a688-cfcc0b853e39" path="/var/lib/kubelet/pods/5f334617-fe1b-4147-a688-cfcc0b853e39/volumes"
Dec 05 12:12:55 crc kubenswrapper[4728]: I1205 12:12:55.353285 4728 scope.go:117] "RemoveContainer" containerID="c681aae2a7c18de59995f0595b656c92ba330a744e4c4ab020e99b92f9b5d623"
Dec 05 12:12:55 crc kubenswrapper[4728]: E1205 12:12:55.353718 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d"
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:13:10 crc kubenswrapper[4728]: I1205 12:13:10.354021 4728 scope.go:117] "RemoveContainer" containerID="c681aae2a7c18de59995f0595b656c92ba330a744e4c4ab020e99b92f9b5d623" Dec 05 12:13:10 crc kubenswrapper[4728]: E1205 12:13:10.355340 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:13:22 crc kubenswrapper[4728]: I1205 12:13:22.352227 4728 scope.go:117] "RemoveContainer" containerID="c681aae2a7c18de59995f0595b656c92ba330a744e4c4ab020e99b92f9b5d623" Dec 05 12:13:22 crc kubenswrapper[4728]: E1205 12:13:22.352906 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:13:37 crc kubenswrapper[4728]: I1205 12:13:37.354414 4728 scope.go:117] "RemoveContainer" containerID="c681aae2a7c18de59995f0595b656c92ba330a744e4c4ab020e99b92f9b5d623" Dec 05 12:13:37 crc kubenswrapper[4728]: E1205 12:13:37.358216 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:13:51 crc kubenswrapper[4728]: I1205 12:13:51.352891 4728 scope.go:117] "RemoveContainer" containerID="c681aae2a7c18de59995f0595b656c92ba330a744e4c4ab020e99b92f9b5d623" Dec 05 12:13:51 crc kubenswrapper[4728]: E1205 12:13:51.354102 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:13:53 crc kubenswrapper[4728]: I1205 12:13:53.110231 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-2dthv"] Dec 05 12:13:53 crc kubenswrapper[4728]: E1205 12:13:53.111074 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f334617-fe1b-4147-a688-cfcc0b853e39" containerName="extract-utilities" Dec 05 12:13:53 crc kubenswrapper[4728]: I1205 12:13:53.111105 4728 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="5f334617-fe1b-4147-a688-cfcc0b853e39" containerName="extract-utilities" Dec 05 12:13:53 crc kubenswrapper[4728]: E1205 12:13:53.111142 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f334617-fe1b-4147-a688-cfcc0b853e39" containerName="extract-content" Dec 05 12:13:53 crc kubenswrapper[4728]: I1205 12:13:53.111153 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f334617-fe1b-4147-a688-cfcc0b853e39" containerName="extract-content" Dec 05 12:13:53 crc kubenswrapper[4728]: E1205 12:13:53.111172 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f334617-fe1b-4147-a688-cfcc0b853e39" containerName="registry-server" Dec 05 12:13:53 crc kubenswrapper[4728]: I1205 12:13:53.111182 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f334617-fe1b-4147-a688-cfcc0b853e39" containerName="registry-server" Dec 05 12:13:53 crc kubenswrapper[4728]: I1205 12:13:53.111453 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f334617-fe1b-4147-a688-cfcc0b853e39" containerName="registry-server" Dec 05 12:13:53 crc kubenswrapper[4728]: I1205 12:13:53.113439 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2dthv" Dec 05 12:13:53 crc kubenswrapper[4728]: I1205 12:13:53.122624 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2dthv"] Dec 05 12:13:53 crc kubenswrapper[4728]: I1205 12:13:53.236573 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ea19f72-30d9-42f3-a81f-d28bc88dd700-catalog-content\") pod \"redhat-marketplace-2dthv\" (UID: \"0ea19f72-30d9-42f3-a81f-d28bc88dd700\") " pod="openshift-marketplace/redhat-marketplace-2dthv" Dec 05 12:13:53 crc kubenswrapper[4728]: I1205 12:13:53.236918 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rnnmc\" (UniqueName: \"kubernetes.io/projected/0ea19f72-30d9-42f3-a81f-d28bc88dd700-kube-api-access-rnnmc\") pod \"redhat-marketplace-2dthv\" (UID: \"0ea19f72-30d9-42f3-a81f-d28bc88dd700\") " pod="openshift-marketplace/redhat-marketplace-2dthv" Dec 05 12:13:53 crc kubenswrapper[4728]: I1205 12:13:53.236954 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ea19f72-30d9-42f3-a81f-d28bc88dd700-utilities\") pod \"redhat-marketplace-2dthv\" (UID: \"0ea19f72-30d9-42f3-a81f-d28bc88dd700\") " pod="openshift-marketplace/redhat-marketplace-2dthv" Dec 05 12:13:53 crc kubenswrapper[4728]: I1205 12:13:53.338480 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ea19f72-30d9-42f3-a81f-d28bc88dd700-catalog-content\") pod \"redhat-marketplace-2dthv\" (UID: \"0ea19f72-30d9-42f3-a81f-d28bc88dd700\") " pod="openshift-marketplace/redhat-marketplace-2dthv" Dec 05 12:13:53 crc kubenswrapper[4728]: I1205 12:13:53.338598 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rnnmc\" (UniqueName: \"kubernetes.io/projected/0ea19f72-30d9-42f3-a81f-d28bc88dd700-kube-api-access-rnnmc\") pod \"redhat-marketplace-2dthv\" (UID: \"0ea19f72-30d9-42f3-a81f-d28bc88dd700\") " pod="openshift-marketplace/redhat-marketplace-2dthv" Dec 05 12:13:53 crc kubenswrapper[4728]: I1205 12:13:53.338634 4728 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ea19f72-30d9-42f3-a81f-d28bc88dd700-utilities\") pod \"redhat-marketplace-2dthv\" (UID: \"0ea19f72-30d9-42f3-a81f-d28bc88dd700\") " pod="openshift-marketplace/redhat-marketplace-2dthv" Dec 05 12:13:53 crc kubenswrapper[4728]: I1205 12:13:53.339201 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ea19f72-30d9-42f3-a81f-d28bc88dd700-utilities\") pod \"redhat-marketplace-2dthv\" (UID: \"0ea19f72-30d9-42f3-a81f-d28bc88dd700\") " pod="openshift-marketplace/redhat-marketplace-2dthv" Dec 05 12:13:53 crc kubenswrapper[4728]: I1205 12:13:53.339456 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ea19f72-30d9-42f3-a81f-d28bc88dd700-catalog-content\") pod \"redhat-marketplace-2dthv\" (UID: \"0ea19f72-30d9-42f3-a81f-d28bc88dd700\") " pod="openshift-marketplace/redhat-marketplace-2dthv" Dec 05 12:13:53 crc kubenswrapper[4728]: I1205 12:13:53.538616 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rnnmc\" (UniqueName: \"kubernetes.io/projected/0ea19f72-30d9-42f3-a81f-d28bc88dd700-kube-api-access-rnnmc\") pod \"redhat-marketplace-2dthv\" (UID: \"0ea19f72-30d9-42f3-a81f-d28bc88dd700\") " pod="openshift-marketplace/redhat-marketplace-2dthv" Dec 05 12:13:53 crc kubenswrapper[4728]: I1205 12:13:53.762391 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2dthv" Dec 05 12:13:54 crc kubenswrapper[4728]: I1205 12:13:54.264064 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2dthv"] Dec 05 12:13:55 crc kubenswrapper[4728]: I1205 12:13:55.028309 4728 generic.go:334] "Generic (PLEG): container finished" podID="0ea19f72-30d9-42f3-a81f-d28bc88dd700" containerID="d715cfabd961aa09a4d79c64bb4bc302ff1448e9f7a78f42186e02d7926582c1" exitCode=0 Dec 05 12:13:55 crc kubenswrapper[4728]: I1205 12:13:55.028451 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2dthv" event={"ID":"0ea19f72-30d9-42f3-a81f-d28bc88dd700","Type":"ContainerDied","Data":"d715cfabd961aa09a4d79c64bb4bc302ff1448e9f7a78f42186e02d7926582c1"} Dec 05 12:13:55 crc kubenswrapper[4728]: I1205 12:13:55.028630 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2dthv" event={"ID":"0ea19f72-30d9-42f3-a81f-d28bc88dd700","Type":"ContainerStarted","Data":"69dec6e378ea83b831e3678aef796128da447a21403964286c60bb4456e5f6f8"} Dec 05 12:13:55 crc kubenswrapper[4728]: I1205 12:13:55.030640 4728 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 12:13:57 crc kubenswrapper[4728]: I1205 12:13:57.072041 4728 generic.go:334] "Generic (PLEG): container finished" podID="0ea19f72-30d9-42f3-a81f-d28bc88dd700" containerID="232fccd94503a2cb381cb5da8ac081f5f70fb1064174a08b1a3c1531ebb75317" exitCode=0 Dec 05 12:13:57 crc kubenswrapper[4728]: I1205 12:13:57.073145 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2dthv" event={"ID":"0ea19f72-30d9-42f3-a81f-d28bc88dd700","Type":"ContainerDied","Data":"232fccd94503a2cb381cb5da8ac081f5f70fb1064174a08b1a3c1531ebb75317"} Dec 05 12:13:58 crc kubenswrapper[4728]: I1205 12:13:58.083874 4728 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2dthv" event={"ID":"0ea19f72-30d9-42f3-a81f-d28bc88dd700","Type":"ContainerStarted","Data":"4d347e8db20d398da14bcd72cbd65a9aed8dd2a2421cab17610faabba9f1df79"} Dec 05 12:14:03 crc kubenswrapper[4728]: I1205 12:14:03.763139 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-2dthv" Dec 05 12:14:03 crc kubenswrapper[4728]: I1205 12:14:03.764862 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-2dthv" Dec 05 12:14:03 crc kubenswrapper[4728]: I1205 12:14:03.820126 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-2dthv" Dec 05 12:14:03 crc kubenswrapper[4728]: I1205 12:14:03.844613 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-2dthv" podStartSLOduration=8.36873017 podStartE2EDuration="10.844593835s" podCreationTimestamp="2025-12-05 12:13:53 +0000 UTC" firstStartedPulling="2025-12-05 12:13:55.030336039 +0000 UTC m=+3969.172458732" lastFinishedPulling="2025-12-05 12:13:57.506199704 +0000 UTC m=+3971.648322397" observedRunningTime="2025-12-05 12:13:58.107719077 +0000 UTC m=+3972.249841770" watchObservedRunningTime="2025-12-05 12:14:03.844593835 +0000 UTC m=+3977.986716538" Dec 05 12:14:04 crc kubenswrapper[4728]: I1205 12:14:04.186845 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-2dthv" Dec 05 12:14:04 crc kubenswrapper[4728]: I1205 12:14:04.241745 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-2dthv"] Dec 05 12:14:05 crc kubenswrapper[4728]: I1205 12:14:05.352047 4728 scope.go:117] "RemoveContainer" containerID="c681aae2a7c18de59995f0595b656c92ba330a744e4c4ab020e99b92f9b5d623" Dec 05 12:14:05 crc kubenswrapper[4728]: E1205 12:14:05.352358 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:14:06 crc kubenswrapper[4728]: I1205 12:14:06.183652 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-2dthv" podUID="0ea19f72-30d9-42f3-a81f-d28bc88dd700" containerName="registry-server" containerID="cri-o://4d347e8db20d398da14bcd72cbd65a9aed8dd2a2421cab17610faabba9f1df79" gracePeriod=2 Dec 05 12:14:06 crc kubenswrapper[4728]: I1205 12:14:06.994321 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2dthv" Dec 05 12:14:07 crc kubenswrapper[4728]: I1205 12:14:07.143319 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ea19f72-30d9-42f3-a81f-d28bc88dd700-utilities\") pod \"0ea19f72-30d9-42f3-a81f-d28bc88dd700\" (UID: \"0ea19f72-30d9-42f3-a81f-d28bc88dd700\") " Dec 05 12:14:07 crc kubenswrapper[4728]: I1205 12:14:07.143529 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnnmc\" (UniqueName: \"kubernetes.io/projected/0ea19f72-30d9-42f3-a81f-d28bc88dd700-kube-api-access-rnnmc\") pod \"0ea19f72-30d9-42f3-a81f-d28bc88dd700\" (UID: \"0ea19f72-30d9-42f3-a81f-d28bc88dd700\") " Dec 05 12:14:07 crc kubenswrapper[4728]: I1205 12:14:07.143620 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ea19f72-30d9-42f3-a81f-d28bc88dd700-catalog-content\") pod \"0ea19f72-30d9-42f3-a81f-d28bc88dd700\" (UID: \"0ea19f72-30d9-42f3-a81f-d28bc88dd700\") " Dec 05 12:14:07 crc kubenswrapper[4728]: I1205 12:14:07.146959 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0ea19f72-30d9-42f3-a81f-d28bc88dd700-utilities" (OuterVolumeSpecName: "utilities") pod "0ea19f72-30d9-42f3-a81f-d28bc88dd700" (UID: "0ea19f72-30d9-42f3-a81f-d28bc88dd700"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:14:07 crc kubenswrapper[4728]: I1205 12:14:07.162059 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0ea19f72-30d9-42f3-a81f-d28bc88dd700-kube-api-access-rnnmc" (OuterVolumeSpecName: "kube-api-access-rnnmc") pod "0ea19f72-30d9-42f3-a81f-d28bc88dd700" (UID: "0ea19f72-30d9-42f3-a81f-d28bc88dd700"). InnerVolumeSpecName "kube-api-access-rnnmc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:14:07 crc kubenswrapper[4728]: I1205 12:14:07.165645 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0ea19f72-30d9-42f3-a81f-d28bc88dd700-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0ea19f72-30d9-42f3-a81f-d28bc88dd700" (UID: "0ea19f72-30d9-42f3-a81f-d28bc88dd700"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:14:07 crc kubenswrapper[4728]: I1205 12:14:07.197766 4728 generic.go:334] "Generic (PLEG): container finished" podID="0ea19f72-30d9-42f3-a81f-d28bc88dd700" containerID="4d347e8db20d398da14bcd72cbd65a9aed8dd2a2421cab17610faabba9f1df79" exitCode=0 Dec 05 12:14:07 crc kubenswrapper[4728]: I1205 12:14:07.197841 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2dthv" event={"ID":"0ea19f72-30d9-42f3-a81f-d28bc88dd700","Type":"ContainerDied","Data":"4d347e8db20d398da14bcd72cbd65a9aed8dd2a2421cab17610faabba9f1df79"} Dec 05 12:14:07 crc kubenswrapper[4728]: I1205 12:14:07.197916 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2dthv" event={"ID":"0ea19f72-30d9-42f3-a81f-d28bc88dd700","Type":"ContainerDied","Data":"69dec6e378ea83b831e3678aef796128da447a21403964286c60bb4456e5f6f8"} Dec 05 12:14:07 crc kubenswrapper[4728]: I1205 12:14:07.197944 4728 scope.go:117] "RemoveContainer" containerID="4d347e8db20d398da14bcd72cbd65a9aed8dd2a2421cab17610faabba9f1df79" Dec 05 12:14:07 crc kubenswrapper[4728]: I1205 12:14:07.199095 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2dthv" Dec 05 12:14:07 crc kubenswrapper[4728]: I1205 12:14:07.243678 4728 scope.go:117] "RemoveContainer" containerID="232fccd94503a2cb381cb5da8ac081f5f70fb1064174a08b1a3c1531ebb75317" Dec 05 12:14:07 crc kubenswrapper[4728]: I1205 12:14:07.252461 4728 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ea19f72-30d9-42f3-a81f-d28bc88dd700-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 12:14:07 crc kubenswrapper[4728]: I1205 12:14:07.252546 4728 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ea19f72-30d9-42f3-a81f-d28bc88dd700-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 12:14:07 crc kubenswrapper[4728]: I1205 12:14:07.252560 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnnmc\" (UniqueName: \"kubernetes.io/projected/0ea19f72-30d9-42f3-a81f-d28bc88dd700-kube-api-access-rnnmc\") on node \"crc\" DevicePath \"\"" Dec 05 12:14:07 crc kubenswrapper[4728]: I1205 12:14:07.268784 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-2dthv"] Dec 05 12:14:07 crc kubenswrapper[4728]: I1205 12:14:07.277461 4728 scope.go:117] "RemoveContainer" containerID="d715cfabd961aa09a4d79c64bb4bc302ff1448e9f7a78f42186e02d7926582c1" Dec 05 12:14:07 crc kubenswrapper[4728]: I1205 12:14:07.281024 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-2dthv"] Dec 05 12:14:07 crc kubenswrapper[4728]: I1205 12:14:07.332805 4728 scope.go:117] "RemoveContainer" containerID="4d347e8db20d398da14bcd72cbd65a9aed8dd2a2421cab17610faabba9f1df79" Dec 05 12:14:07 crc kubenswrapper[4728]: E1205 12:14:07.333560 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4d347e8db20d398da14bcd72cbd65a9aed8dd2a2421cab17610faabba9f1df79\": container with ID starting with 4d347e8db20d398da14bcd72cbd65a9aed8dd2a2421cab17610faabba9f1df79 not found: ID does not exist" containerID="4d347e8db20d398da14bcd72cbd65a9aed8dd2a2421cab17610faabba9f1df79" Dec 05 12:14:07 crc kubenswrapper[4728]: I1205 12:14:07.333602 4728 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d347e8db20d398da14bcd72cbd65a9aed8dd2a2421cab17610faabba9f1df79"} err="failed to get container status \"4d347e8db20d398da14bcd72cbd65a9aed8dd2a2421cab17610faabba9f1df79\": rpc error: code = NotFound desc = could not find container \"4d347e8db20d398da14bcd72cbd65a9aed8dd2a2421cab17610faabba9f1df79\": container with ID starting with 4d347e8db20d398da14bcd72cbd65a9aed8dd2a2421cab17610faabba9f1df79 not found: ID does not exist" Dec 05 12:14:07 crc kubenswrapper[4728]: I1205 12:14:07.333633 4728 scope.go:117] "RemoveContainer" containerID="232fccd94503a2cb381cb5da8ac081f5f70fb1064174a08b1a3c1531ebb75317" Dec 05 12:14:07 crc kubenswrapper[4728]: E1205 12:14:07.334113 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"232fccd94503a2cb381cb5da8ac081f5f70fb1064174a08b1a3c1531ebb75317\": container with ID starting with 232fccd94503a2cb381cb5da8ac081f5f70fb1064174a08b1a3c1531ebb75317 not found: ID does not exist" containerID="232fccd94503a2cb381cb5da8ac081f5f70fb1064174a08b1a3c1531ebb75317" Dec 05 12:14:07 crc kubenswrapper[4728]: I1205 12:14:07.334160 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"232fccd94503a2cb381cb5da8ac081f5f70fb1064174a08b1a3c1531ebb75317"} err="failed to get container status \"232fccd94503a2cb381cb5da8ac081f5f70fb1064174a08b1a3c1531ebb75317\": rpc error: code = NotFound desc = could not find container \"232fccd94503a2cb381cb5da8ac081f5f70fb1064174a08b1a3c1531ebb75317\": container with ID starting with 232fccd94503a2cb381cb5da8ac081f5f70fb1064174a08b1a3c1531ebb75317 not found: ID does not exist" Dec 05 12:14:07 crc kubenswrapper[4728]: I1205 12:14:07.334191 4728 scope.go:117] "RemoveContainer" containerID="d715cfabd961aa09a4d79c64bb4bc302ff1448e9f7a78f42186e02d7926582c1" Dec 05 12:14:07 crc kubenswrapper[4728]: E1205 12:14:07.334517 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d715cfabd961aa09a4d79c64bb4bc302ff1448e9f7a78f42186e02d7926582c1\": container with ID starting with d715cfabd961aa09a4d79c64bb4bc302ff1448e9f7a78f42186e02d7926582c1 not found: ID does not exist" containerID="d715cfabd961aa09a4d79c64bb4bc302ff1448e9f7a78f42186e02d7926582c1" Dec 05 12:14:07 crc kubenswrapper[4728]: I1205 12:14:07.334545 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d715cfabd961aa09a4d79c64bb4bc302ff1448e9f7a78f42186e02d7926582c1"} err="failed to get container status \"d715cfabd961aa09a4d79c64bb4bc302ff1448e9f7a78f42186e02d7926582c1\": rpc error: code = NotFound desc = could not find container \"d715cfabd961aa09a4d79c64bb4bc302ff1448e9f7a78f42186e02d7926582c1\": container with ID starting with d715cfabd961aa09a4d79c64bb4bc302ff1448e9f7a78f42186e02d7926582c1 not found: ID does not exist" Dec 05 12:14:08 crc kubenswrapper[4728]: I1205 12:14:08.362200 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0ea19f72-30d9-42f3-a81f-d28bc88dd700" path="/var/lib/kubelet/pods/0ea19f72-30d9-42f3-a81f-d28bc88dd700/volumes" Dec 05 12:14:17 crc kubenswrapper[4728]: I1205 12:14:17.352993 4728 scope.go:117] "RemoveContainer" containerID="c681aae2a7c18de59995f0595b656c92ba330a744e4c4ab020e99b92f9b5d623" Dec 05 12:14:17 crc kubenswrapper[4728]: E1205 12:14:17.354671 4728 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:14:29 crc kubenswrapper[4728]: I1205 12:14:29.352650 4728 scope.go:117] "RemoveContainer" containerID="c681aae2a7c18de59995f0595b656c92ba330a744e4c4ab020e99b92f9b5d623" Dec 05 12:14:29 crc kubenswrapper[4728]: E1205 12:14:29.353430 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:14:43 crc kubenswrapper[4728]: I1205 12:14:43.352526 4728 scope.go:117] "RemoveContainer" containerID="c681aae2a7c18de59995f0595b656c92ba330a744e4c4ab020e99b92f9b5d623" Dec 05 12:14:43 crc kubenswrapper[4728]: E1205 12:14:43.353345 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:14:55 crc kubenswrapper[4728]: I1205 12:14:55.352374 4728 scope.go:117] "RemoveContainer" containerID="c681aae2a7c18de59995f0595b656c92ba330a744e4c4ab020e99b92f9b5d623" Dec 05 12:14:55 crc kubenswrapper[4728]: E1205 12:14:55.353061 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:15:00 crc kubenswrapper[4728]: I1205 12:15:00.184226 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415615-n7ph5"] Dec 05 12:15:00 crc kubenswrapper[4728]: E1205 12:15:00.185217 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ea19f72-30d9-42f3-a81f-d28bc88dd700" containerName="extract-content" Dec 05 12:15:00 crc kubenswrapper[4728]: I1205 12:15:00.185232 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ea19f72-30d9-42f3-a81f-d28bc88dd700" containerName="extract-content" Dec 05 12:15:00 crc kubenswrapper[4728]: E1205 12:15:00.185274 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ea19f72-30d9-42f3-a81f-d28bc88dd700" containerName="registry-server" Dec 05 12:15:00 crc kubenswrapper[4728]: I1205 12:15:00.185280 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ea19f72-30d9-42f3-a81f-d28bc88dd700" containerName="registry-server" Dec 05 12:15:00 crc kubenswrapper[4728]: E1205 12:15:00.185300 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ea19f72-30d9-42f3-a81f-d28bc88dd700" 
containerName="extract-utilities" Dec 05 12:15:00 crc kubenswrapper[4728]: I1205 12:15:00.185308 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ea19f72-30d9-42f3-a81f-d28bc88dd700" containerName="extract-utilities" Dec 05 12:15:00 crc kubenswrapper[4728]: I1205 12:15:00.185530 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ea19f72-30d9-42f3-a81f-d28bc88dd700" containerName="registry-server" Dec 05 12:15:00 crc kubenswrapper[4728]: I1205 12:15:00.186195 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415615-n7ph5" Dec 05 12:15:00 crc kubenswrapper[4728]: I1205 12:15:00.189703 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 12:15:00 crc kubenswrapper[4728]: I1205 12:15:00.189866 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 12:15:00 crc kubenswrapper[4728]: I1205 12:15:00.204357 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415615-n7ph5"] Dec 05 12:15:00 crc kubenswrapper[4728]: I1205 12:15:00.358371 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/af62dcbb-176a-4aca-9dd0-fa14c716871a-config-volume\") pod \"collect-profiles-29415615-n7ph5\" (UID: \"af62dcbb-176a-4aca-9dd0-fa14c716871a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415615-n7ph5" Dec 05 12:15:00 crc kubenswrapper[4728]: I1205 12:15:00.358749 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/af62dcbb-176a-4aca-9dd0-fa14c716871a-secret-volume\") pod \"collect-profiles-29415615-n7ph5\" (UID: \"af62dcbb-176a-4aca-9dd0-fa14c716871a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415615-n7ph5" Dec 05 12:15:00 crc kubenswrapper[4728]: I1205 12:15:00.358902 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7l654\" (UniqueName: \"kubernetes.io/projected/af62dcbb-176a-4aca-9dd0-fa14c716871a-kube-api-access-7l654\") pod \"collect-profiles-29415615-n7ph5\" (UID: \"af62dcbb-176a-4aca-9dd0-fa14c716871a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415615-n7ph5" Dec 05 12:15:00 crc kubenswrapper[4728]: I1205 12:15:00.461221 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7l654\" (UniqueName: \"kubernetes.io/projected/af62dcbb-176a-4aca-9dd0-fa14c716871a-kube-api-access-7l654\") pod \"collect-profiles-29415615-n7ph5\" (UID: \"af62dcbb-176a-4aca-9dd0-fa14c716871a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415615-n7ph5" Dec 05 12:15:00 crc kubenswrapper[4728]: I1205 12:15:00.461328 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/af62dcbb-176a-4aca-9dd0-fa14c716871a-config-volume\") pod \"collect-profiles-29415615-n7ph5\" (UID: \"af62dcbb-176a-4aca-9dd0-fa14c716871a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415615-n7ph5" Dec 05 12:15:00 crc kubenswrapper[4728]: I1205 12:15:00.462244 4728 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/af62dcbb-176a-4aca-9dd0-fa14c716871a-config-volume\") pod \"collect-profiles-29415615-n7ph5\" (UID: \"af62dcbb-176a-4aca-9dd0-fa14c716871a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415615-n7ph5" Dec 05 12:15:00 crc kubenswrapper[4728]: I1205 12:15:00.462433 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/af62dcbb-176a-4aca-9dd0-fa14c716871a-secret-volume\") pod \"collect-profiles-29415615-n7ph5\" (UID: \"af62dcbb-176a-4aca-9dd0-fa14c716871a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415615-n7ph5" Dec 05 12:15:00 crc kubenswrapper[4728]: I1205 12:15:00.906210 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7l654\" (UniqueName: \"kubernetes.io/projected/af62dcbb-176a-4aca-9dd0-fa14c716871a-kube-api-access-7l654\") pod \"collect-profiles-29415615-n7ph5\" (UID: \"af62dcbb-176a-4aca-9dd0-fa14c716871a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415615-n7ph5" Dec 05 12:15:00 crc kubenswrapper[4728]: I1205 12:15:00.907289 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/af62dcbb-176a-4aca-9dd0-fa14c716871a-secret-volume\") pod \"collect-profiles-29415615-n7ph5\" (UID: \"af62dcbb-176a-4aca-9dd0-fa14c716871a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415615-n7ph5" Dec 05 12:15:01 crc kubenswrapper[4728]: I1205 12:15:01.118978 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415615-n7ph5" Dec 05 12:15:01 crc kubenswrapper[4728]: I1205 12:15:01.641371 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415615-n7ph5"] Dec 05 12:15:01 crc kubenswrapper[4728]: I1205 12:15:01.933581 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415615-n7ph5" event={"ID":"af62dcbb-176a-4aca-9dd0-fa14c716871a","Type":"ContainerStarted","Data":"8a1d98952cbe63d1583984c04f749303599e596948eadcf7256f1c1979970693"} Dec 05 12:15:01 crc kubenswrapper[4728]: I1205 12:15:01.934026 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415615-n7ph5" event={"ID":"af62dcbb-176a-4aca-9dd0-fa14c716871a","Type":"ContainerStarted","Data":"a4bf954411cda353a366c97d852d2d7e4fb4a5fb7ee182b8beac931e9f0f1238"} Dec 05 12:15:01 crc kubenswrapper[4728]: I1205 12:15:01.954875 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29415615-n7ph5" podStartSLOduration=1.954854442 podStartE2EDuration="1.954854442s" podCreationTimestamp="2025-12-05 12:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:15:01.948344419 +0000 UTC m=+4036.090467112" watchObservedRunningTime="2025-12-05 12:15:01.954854442 +0000 UTC m=+4036.096977125" Dec 05 12:15:02 crc kubenswrapper[4728]: I1205 12:15:02.945381 4728 generic.go:334] "Generic (PLEG): container finished" podID="af62dcbb-176a-4aca-9dd0-fa14c716871a" containerID="8a1d98952cbe63d1583984c04f749303599e596948eadcf7256f1c1979970693" exitCode=0 Dec 05 12:15:02 crc kubenswrapper[4728]: I1205 
12:15:02.945444 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415615-n7ph5" event={"ID":"af62dcbb-176a-4aca-9dd0-fa14c716871a","Type":"ContainerDied","Data":"8a1d98952cbe63d1583984c04f749303599e596948eadcf7256f1c1979970693"} Dec 05 12:15:04 crc kubenswrapper[4728]: I1205 12:15:04.564271 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415615-n7ph5" Dec 05 12:15:04 crc kubenswrapper[4728]: I1205 12:15:04.656360 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/af62dcbb-176a-4aca-9dd0-fa14c716871a-config-volume\") pod \"af62dcbb-176a-4aca-9dd0-fa14c716871a\" (UID: \"af62dcbb-176a-4aca-9dd0-fa14c716871a\") " Dec 05 12:15:04 crc kubenswrapper[4728]: I1205 12:15:04.656774 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7l654\" (UniqueName: \"kubernetes.io/projected/af62dcbb-176a-4aca-9dd0-fa14c716871a-kube-api-access-7l654\") pod \"af62dcbb-176a-4aca-9dd0-fa14c716871a\" (UID: \"af62dcbb-176a-4aca-9dd0-fa14c716871a\") " Dec 05 12:15:04 crc kubenswrapper[4728]: I1205 12:15:04.656895 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/af62dcbb-176a-4aca-9dd0-fa14c716871a-secret-volume\") pod \"af62dcbb-176a-4aca-9dd0-fa14c716871a\" (UID: \"af62dcbb-176a-4aca-9dd0-fa14c716871a\") " Dec 05 12:15:04 crc kubenswrapper[4728]: I1205 12:15:04.657212 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/af62dcbb-176a-4aca-9dd0-fa14c716871a-config-volume" (OuterVolumeSpecName: "config-volume") pod "af62dcbb-176a-4aca-9dd0-fa14c716871a" (UID: "af62dcbb-176a-4aca-9dd0-fa14c716871a"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:15:04 crc kubenswrapper[4728]: I1205 12:15:04.657759 4728 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/af62dcbb-176a-4aca-9dd0-fa14c716871a-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 12:15:04 crc kubenswrapper[4728]: I1205 12:15:04.669091 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af62dcbb-176a-4aca-9dd0-fa14c716871a-kube-api-access-7l654" (OuterVolumeSpecName: "kube-api-access-7l654") pod "af62dcbb-176a-4aca-9dd0-fa14c716871a" (UID: "af62dcbb-176a-4aca-9dd0-fa14c716871a"). InnerVolumeSpecName "kube-api-access-7l654". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:15:04 crc kubenswrapper[4728]: I1205 12:15:04.671367 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af62dcbb-176a-4aca-9dd0-fa14c716871a-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "af62dcbb-176a-4aca-9dd0-fa14c716871a" (UID: "af62dcbb-176a-4aca-9dd0-fa14c716871a"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:15:04 crc kubenswrapper[4728]: I1205 12:15:04.701781 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415570-hv7p2"] Dec 05 12:15:04 crc kubenswrapper[4728]: I1205 12:15:04.715159 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415570-hv7p2"] Dec 05 12:15:04 crc kubenswrapper[4728]: I1205 12:15:04.759876 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7l654\" (UniqueName: \"kubernetes.io/projected/af62dcbb-176a-4aca-9dd0-fa14c716871a-kube-api-access-7l654\") on node \"crc\" DevicePath \"\"" Dec 05 12:15:04 crc kubenswrapper[4728]: I1205 12:15:04.759906 4728 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/af62dcbb-176a-4aca-9dd0-fa14c716871a-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 12:15:04 crc kubenswrapper[4728]: I1205 12:15:04.964453 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415615-n7ph5" event={"ID":"af62dcbb-176a-4aca-9dd0-fa14c716871a","Type":"ContainerDied","Data":"a4bf954411cda353a366c97d852d2d7e4fb4a5fb7ee182b8beac931e9f0f1238"} Dec 05 12:15:04 crc kubenswrapper[4728]: I1205 12:15:04.964502 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a4bf954411cda353a366c97d852d2d7e4fb4a5fb7ee182b8beac931e9f0f1238" Dec 05 12:15:04 crc kubenswrapper[4728]: I1205 12:15:04.964595 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415615-n7ph5" Dec 05 12:15:06 crc kubenswrapper[4728]: I1205 12:15:06.363552 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9773570d-8d08-4620-8911-40e5ecd02aef" path="/var/lib/kubelet/pods/9773570d-8d08-4620-8911-40e5ecd02aef/volumes" Dec 05 12:15:07 crc kubenswrapper[4728]: I1205 12:15:07.352138 4728 scope.go:117] "RemoveContainer" containerID="c681aae2a7c18de59995f0595b656c92ba330a744e4c4ab020e99b92f9b5d623" Dec 05 12:15:07 crc kubenswrapper[4728]: E1205 12:15:07.352437 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:15:19 crc kubenswrapper[4728]: I1205 12:15:19.352446 4728 scope.go:117] "RemoveContainer" containerID="c681aae2a7c18de59995f0595b656c92ba330a744e4c4ab020e99b92f9b5d623" Dec 05 12:15:19 crc kubenswrapper[4728]: E1205 12:15:19.353092 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:15:33 crc kubenswrapper[4728]: I1205 12:15:33.352372 4728 scope.go:117] "RemoveContainer" containerID="c681aae2a7c18de59995f0595b656c92ba330a744e4c4ab020e99b92f9b5d623" Dec 05 12:15:33 
Dec 05 12:15:48 crc kubenswrapper[4728]: I1205 12:15:48.352615 4728 scope.go:117] "RemoveContainer" containerID="c681aae2a7c18de59995f0595b656c92ba330a744e4c4ab020e99b92f9b5d623"
Dec 05 12:15:48 crc kubenswrapper[4728]: E1205 12:15:48.353449 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d"
Dec 05 12:15:59 crc kubenswrapper[4728]: I1205 12:15:59.352442 4728 scope.go:117] "RemoveContainer" containerID="c681aae2a7c18de59995f0595b656c92ba330a744e4c4ab020e99b92f9b5d623"
Dec 05 12:15:59 crc kubenswrapper[4728]: E1205 12:15:59.353221 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d"
Dec 05 12:16:01 crc kubenswrapper[4728]: I1205 12:16:01.781451 4728 scope.go:117] "RemoveContainer" containerID="51a3d337d1fbf45a1a83f48dc99cbbc7fc7f463c92483cc0f5fae249c0eb0872"
Dec 05 12:16:10 crc kubenswrapper[4728]: I1205 12:16:10.355124 4728 scope.go:117] "RemoveContainer" containerID="c681aae2a7c18de59995f0595b656c92ba330a744e4c4ab020e99b92f9b5d623"
Dec 05 12:16:10 crc kubenswrapper[4728]: E1205 12:16:10.355951 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d"
Dec 05 12:16:22 crc kubenswrapper[4728]: I1205 12:16:22.351778 4728 scope.go:117] "RemoveContainer" containerID="c681aae2a7c18de59995f0595b656c92ba330a744e4c4ab020e99b92f9b5d623"
Dec 05 12:16:22 crc kubenswrapper[4728]: E1205 12:16:22.352623 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d"
Dec 05 12:16:36 crc kubenswrapper[4728]: I1205 12:16:36.364717 4728 scope.go:117] "RemoveContainer" containerID="c681aae2a7c18de59995f0595b656c92ba330a744e4c4ab020e99b92f9b5d623"
Dec 05 12:16:36 crc kubenswrapper[4728]: I1205 12:16:36.794900 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" event={"ID":"95bfa60b-fcb6-4519-abc5-c25fea50921d","Type":"ContainerStarted","Data":"6c97ed5fd3cfbc81e5cc23b6a32c390369a932bc3c559ef5212dab8ab1cdeee7"}
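
The machine-config-daemon entries between 12:13 and 12:16 show the container restart back-off pinned at its cap: each sync attempt is refused with CrashLoopBackOff ("back-off 5m0s") until the window expires, after which the RemoveContainer at 12:16:36 is finally followed by a ContainerStarted with a fresh container ID. A minimal sketch of a capped exponential back-off of this shape; the 10s base is an assumption for illustration, only the 5m cap is visible in the log, and this is not kubelet's actual implementation:

```go
package main

import (
	"fmt"
	"time"
)

// backoff doubles the wait per crash up to a hard cap, which is the
// "back-off 5m0s" printed once the cap has been reached.
func backoff(restarts int) time.Duration {
	const (
		base    = 10 * time.Second // assumed starting delay
		maxWait = 5 * time.Minute  // cap, as seen in the log
	)
	wait := base
	for i := 0; i < restarts; i++ {
		wait *= 2
		if wait >= maxWait {
			return maxWait
		}
	}
	return wait
}

func main() {
	for crashes := 0; crashes <= 6; crashes++ {
		fmt.Printf("after crash %d: wait %v\n", crashes, backoff(crashes))
	}
	// Prints 10s, 20s, 40s, 1m20s, 2m40s, 5m0s, 5m0s. Sync attempts that
	// land inside the window are rejected, which is why the same
	// "Error syncing pod, skipping" pair repeats every 10-15s above until
	// the restart finally goes through.
}
```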
Dec 05 12:17:01 crc kubenswrapper[4728]: I1205 12:17:01.873570 4728 scope.go:117] "RemoveContainer" containerID="08c49345276ca51b89876931af1d5ae73812eaf7728b713ec6198418f70602ab"
Dec 05 12:17:02 crc kubenswrapper[4728]: I1205 12:17:02.124338 4728 scope.go:117] "RemoveContainer" containerID="4c59e5071092bc4719c445e71558ca1e013b5790aaa10713b0dc96874c546aa8"
Dec 05 12:17:02 crc kubenswrapper[4728]: I1205 12:17:02.177290 4728 scope.go:117] "RemoveContainer" containerID="b28ee744d1e1bde84334135641ec2fc733338415ad113256210fa6199e48f4f3"
Dec 05 12:17:47 crc kubenswrapper[4728]: I1205 12:17:47.138054 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-xnfts"]
Dec 05 12:17:47 crc kubenswrapper[4728]: E1205 12:17:47.139116 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af62dcbb-176a-4aca-9dd0-fa14c716871a" containerName="collect-profiles"
Dec 05 12:17:47 crc kubenswrapper[4728]: I1205 12:17:47.139135 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="af62dcbb-176a-4aca-9dd0-fa14c716871a" containerName="collect-profiles"
Dec 05 12:17:47 crc kubenswrapper[4728]: I1205 12:17:47.139430 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="af62dcbb-176a-4aca-9dd0-fa14c716871a" containerName="collect-profiles"
Dec 05 12:17:47 crc kubenswrapper[4728]: I1205 12:17:47.141216 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xnfts"
Dec 05 12:17:47 crc kubenswrapper[4728]: I1205 12:17:47.162080 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xnfts"]
Dec 05 12:17:47 crc kubenswrapper[4728]: I1205 12:17:47.246036 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06c1c505-6d78-46da-9965-622ca1b27127-utilities\") pod \"redhat-operators-xnfts\" (UID: \"06c1c505-6d78-46da-9965-622ca1b27127\") " pod="openshift-marketplace/redhat-operators-xnfts"
Dec 05 12:17:47 crc kubenswrapper[4728]: I1205 12:17:47.246119 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ds2mx\" (UniqueName: \"kubernetes.io/projected/06c1c505-6d78-46da-9965-622ca1b27127-kube-api-access-ds2mx\") pod \"redhat-operators-xnfts\" (UID: \"06c1c505-6d78-46da-9965-622ca1b27127\") " pod="openshift-marketplace/redhat-operators-xnfts"
Dec 05 12:17:47 crc kubenswrapper[4728]: I1205 12:17:47.246405 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06c1c505-6d78-46da-9965-622ca1b27127-catalog-content\") pod \"redhat-operators-xnfts\" (UID: \"06c1c505-6d78-46da-9965-622ca1b27127\") " pod="openshift-marketplace/redhat-operators-xnfts"
Dec 05 12:17:47 crc kubenswrapper[4728]: I1205 12:17:47.348462 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06c1c505-6d78-46da-9965-622ca1b27127-utilities\") pod \"redhat-operators-xnfts\" (UID: \"06c1c505-6d78-46da-9965-622ca1b27127\") " 
pod="openshift-marketplace/redhat-operators-xnfts" Dec 05 12:17:47 crc kubenswrapper[4728]: I1205 12:17:47.348537 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ds2mx\" (UniqueName: \"kubernetes.io/projected/06c1c505-6d78-46da-9965-622ca1b27127-kube-api-access-ds2mx\") pod \"redhat-operators-xnfts\" (UID: \"06c1c505-6d78-46da-9965-622ca1b27127\") " pod="openshift-marketplace/redhat-operators-xnfts" Dec 05 12:17:47 crc kubenswrapper[4728]: I1205 12:17:47.348596 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06c1c505-6d78-46da-9965-622ca1b27127-catalog-content\") pod \"redhat-operators-xnfts\" (UID: \"06c1c505-6d78-46da-9965-622ca1b27127\") " pod="openshift-marketplace/redhat-operators-xnfts" Dec 05 12:17:47 crc kubenswrapper[4728]: I1205 12:17:47.349136 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06c1c505-6d78-46da-9965-622ca1b27127-catalog-content\") pod \"redhat-operators-xnfts\" (UID: \"06c1c505-6d78-46da-9965-622ca1b27127\") " pod="openshift-marketplace/redhat-operators-xnfts" Dec 05 12:17:47 crc kubenswrapper[4728]: I1205 12:17:47.349132 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06c1c505-6d78-46da-9965-622ca1b27127-utilities\") pod \"redhat-operators-xnfts\" (UID: \"06c1c505-6d78-46da-9965-622ca1b27127\") " pod="openshift-marketplace/redhat-operators-xnfts" Dec 05 12:17:47 crc kubenswrapper[4728]: I1205 12:17:47.378838 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ds2mx\" (UniqueName: \"kubernetes.io/projected/06c1c505-6d78-46da-9965-622ca1b27127-kube-api-access-ds2mx\") pod \"redhat-operators-xnfts\" (UID: \"06c1c505-6d78-46da-9965-622ca1b27127\") " pod="openshift-marketplace/redhat-operators-xnfts" Dec 05 12:17:47 crc kubenswrapper[4728]: I1205 12:17:47.490311 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-xnfts" Dec 05 12:17:48 crc kubenswrapper[4728]: I1205 12:17:48.040516 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xnfts"] Dec 05 12:17:48 crc kubenswrapper[4728]: I1205 12:17:48.453292 4728 generic.go:334] "Generic (PLEG): container finished" podID="06c1c505-6d78-46da-9965-622ca1b27127" containerID="5edd8aa6f610b32307eb041e855c0f8d8b319f1e387b624ada78cfcdc3989596" exitCode=0 Dec 05 12:17:48 crc kubenswrapper[4728]: I1205 12:17:48.453342 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xnfts" event={"ID":"06c1c505-6d78-46da-9965-622ca1b27127","Type":"ContainerDied","Data":"5edd8aa6f610b32307eb041e855c0f8d8b319f1e387b624ada78cfcdc3989596"} Dec 05 12:17:48 crc kubenswrapper[4728]: I1205 12:17:48.453371 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xnfts" event={"ID":"06c1c505-6d78-46da-9965-622ca1b27127","Type":"ContainerStarted","Data":"c93ffa65444fcfa64ac5b676ae5e0fde159681c7b2bbf8c9b42731a5d638251b"} Dec 05 12:17:49 crc kubenswrapper[4728]: I1205 12:17:49.465937 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xnfts" event={"ID":"06c1c505-6d78-46da-9965-622ca1b27127","Type":"ContainerStarted","Data":"2550daf11415e1d20aaf9498dee997371bcc446192702a868df453abc6fa0e81"} Dec 05 12:17:53 crc kubenswrapper[4728]: I1205 12:17:53.197675 4728 trace.go:236] Trace[1487474096]: "Calculate volume metrics of catalog-content for pod openshift-marketplace/redhat-operators-hvhhm" (05-Dec-2025 12:17:51.336) (total time: 1858ms): Dec 05 12:17:53 crc kubenswrapper[4728]: Trace[1487474096]: [1.858808927s] [1.858808927s] END Dec 05 12:17:54 crc kubenswrapper[4728]: I1205 12:17:54.122669 4728 generic.go:334] "Generic (PLEG): container finished" podID="06c1c505-6d78-46da-9965-622ca1b27127" containerID="2550daf11415e1d20aaf9498dee997371bcc446192702a868df453abc6fa0e81" exitCode=0 Dec 05 12:17:54 crc kubenswrapper[4728]: I1205 12:17:54.122735 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xnfts" event={"ID":"06c1c505-6d78-46da-9965-622ca1b27127","Type":"ContainerDied","Data":"2550daf11415e1d20aaf9498dee997371bcc446192702a868df453abc6fa0e81"} Dec 05 12:17:55 crc kubenswrapper[4728]: I1205 12:17:55.139781 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xnfts" event={"ID":"06c1c505-6d78-46da-9965-622ca1b27127","Type":"ContainerStarted","Data":"96c57877647397eaff1af7f0a708785209b3e64379972213997b0085f10ce630"} Dec 05 12:17:55 crc kubenswrapper[4728]: I1205 12:17:55.177041 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-xnfts" podStartSLOduration=1.7907983509999998 podStartE2EDuration="8.177025766s" podCreationTimestamp="2025-12-05 12:17:47 +0000 UTC" firstStartedPulling="2025-12-05 12:17:48.456070368 +0000 UTC m=+4202.598193061" lastFinishedPulling="2025-12-05 12:17:54.842297783 +0000 UTC m=+4208.984420476" observedRunningTime="2025-12-05 12:17:55.175951847 +0000 UTC m=+4209.318074560" watchObservedRunningTime="2025-12-05 12:17:55.177025766 +0000 UTC m=+4209.319148459" Dec 05 12:17:57 crc kubenswrapper[4728]: I1205 12:17:57.491266 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-xnfts" Dec 05 12:17:57 crc 
kubenswrapper[4728]: I1205 12:17:57.491861 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-xnfts"
Dec 05 12:17:58 crc kubenswrapper[4728]: I1205 12:17:58.545955 4728 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-xnfts" podUID="06c1c505-6d78-46da-9965-622ca1b27127" containerName="registry-server" probeResult="failure" output=<
Dec 05 12:17:58 crc kubenswrapper[4728]: 	timeout: failed to connect service ":50051" within 1s
Dec 05 12:17:58 crc kubenswrapper[4728]:  >
Dec 05 12:18:07 crc kubenswrapper[4728]: I1205 12:18:07.539109 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-xnfts"
Dec 05 12:18:07 crc kubenswrapper[4728]: I1205 12:18:07.599153 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-xnfts"
Dec 05 12:18:07 crc kubenswrapper[4728]: I1205 12:18:07.777092 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-xnfts"]
Dec 05 12:18:09 crc kubenswrapper[4728]: I1205 12:18:09.280066 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-xnfts" podUID="06c1c505-6d78-46da-9965-622ca1b27127" containerName="registry-server" containerID="cri-o://96c57877647397eaff1af7f0a708785209b3e64379972213997b0085f10ce630" gracePeriod=2
Dec 05 12:18:10 crc kubenswrapper[4728]: I1205 12:18:10.063451 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xnfts"
Dec 05 12:18:10 crc kubenswrapper[4728]: I1205 12:18:10.173503 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06c1c505-6d78-46da-9965-622ca1b27127-catalog-content\") pod \"06c1c505-6d78-46da-9965-622ca1b27127\" (UID: \"06c1c505-6d78-46da-9965-622ca1b27127\") "
Dec 05 12:18:10 crc kubenswrapper[4728]: I1205 12:18:10.173768 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ds2mx\" (UniqueName: \"kubernetes.io/projected/06c1c505-6d78-46da-9965-622ca1b27127-kube-api-access-ds2mx\") pod \"06c1c505-6d78-46da-9965-622ca1b27127\" (UID: \"06c1c505-6d78-46da-9965-622ca1b27127\") "
Dec 05 12:18:10 crc kubenswrapper[4728]: I1205 12:18:10.173910 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06c1c505-6d78-46da-9965-622ca1b27127-utilities\") pod \"06c1c505-6d78-46da-9965-622ca1b27127\" (UID: \"06c1c505-6d78-46da-9965-622ca1b27127\") "
Dec 05 12:18:10 crc kubenswrapper[4728]: I1205 12:18:10.174823 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/06c1c505-6d78-46da-9965-622ca1b27127-utilities" (OuterVolumeSpecName: "utilities") pod "06c1c505-6d78-46da-9965-622ca1b27127" (UID: "06c1c505-6d78-46da-9965-622ca1b27127"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
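
The startup-probe failure above records the probe's own output: a 1s connection attempt against the registry-server gRPC port :50051 that times out while the catalog is still loading, flipping to started/ready nine seconds later. A rough Go equivalent of that check, using a plain TCP dial as a simplified stand-in for the real gRPC health probe (the error string is modeled on the one in the log, and nothing here is the actual probe binary):

```go
package main

import (
	"fmt"
	"net"
	"time"
)

// probe tries to reach addr within timeout; any dial error is reported the
// way the log line above phrases it.
func probe(addr string, timeout time.Duration) error {
	conn, err := net.DialTimeout("tcp", addr, timeout)
	if err != nil {
		return fmt.Errorf("timeout: failed to connect service %q within %v", addr, timeout)
	}
	conn.Close()
	return nil
}

func main() {
	// While the server has not yet bound the port, this fails exactly like
	// the Startup probe above; once :50051 is listening it succeeds and the
	// kubelet marks the container "started", with readiness following.
	if err := probe(":50051", time.Second); err != nil {
		fmt.Println("Probe failed:", err)
		return
	}
	fmt.Println("Probe succeeded")
}
```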
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:18:10 crc kubenswrapper[4728]: I1205 12:18:10.276212 4728 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06c1c505-6d78-46da-9965-622ca1b27127-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 12:18:10 crc kubenswrapper[4728]: I1205 12:18:10.291057 4728 generic.go:334] "Generic (PLEG): container finished" podID="06c1c505-6d78-46da-9965-622ca1b27127" containerID="96c57877647397eaff1af7f0a708785209b3e64379972213997b0085f10ce630" exitCode=0 Dec 05 12:18:10 crc kubenswrapper[4728]: I1205 12:18:10.291356 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xnfts" event={"ID":"06c1c505-6d78-46da-9965-622ca1b27127","Type":"ContainerDied","Data":"96c57877647397eaff1af7f0a708785209b3e64379972213997b0085f10ce630"} Dec 05 12:18:10 crc kubenswrapper[4728]: I1205 12:18:10.291385 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xnfts" event={"ID":"06c1c505-6d78-46da-9965-622ca1b27127","Type":"ContainerDied","Data":"c93ffa65444fcfa64ac5b676ae5e0fde159681c7b2bbf8c9b42731a5d638251b"} Dec 05 12:18:10 crc kubenswrapper[4728]: I1205 12:18:10.291403 4728 scope.go:117] "RemoveContainer" containerID="96c57877647397eaff1af7f0a708785209b3e64379972213997b0085f10ce630" Dec 05 12:18:10 crc kubenswrapper[4728]: I1205 12:18:10.291564 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xnfts" Dec 05 12:18:10 crc kubenswrapper[4728]: I1205 12:18:10.293176 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/06c1c505-6d78-46da-9965-622ca1b27127-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "06c1c505-6d78-46da-9965-622ca1b27127" (UID: "06c1c505-6d78-46da-9965-622ca1b27127"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:18:10 crc kubenswrapper[4728]: I1205 12:18:10.328474 4728 scope.go:117] "RemoveContainer" containerID="2550daf11415e1d20aaf9498dee997371bcc446192702a868df453abc6fa0e81" Dec 05 12:18:10 crc kubenswrapper[4728]: I1205 12:18:10.377903 4728 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06c1c505-6d78-46da-9965-622ca1b27127-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 12:18:10 crc kubenswrapper[4728]: I1205 12:18:10.591757 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06c1c505-6d78-46da-9965-622ca1b27127-kube-api-access-ds2mx" (OuterVolumeSpecName: "kube-api-access-ds2mx") pod "06c1c505-6d78-46da-9965-622ca1b27127" (UID: "06c1c505-6d78-46da-9965-622ca1b27127"). InnerVolumeSpecName "kube-api-access-ds2mx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:18:10 crc kubenswrapper[4728]: I1205 12:18:10.609615 4728 scope.go:117] "RemoveContainer" containerID="5edd8aa6f610b32307eb041e855c0f8d8b319f1e387b624ada78cfcdc3989596" Dec 05 12:18:10 crc kubenswrapper[4728]: I1205 12:18:10.683966 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ds2mx\" (UniqueName: \"kubernetes.io/projected/06c1c505-6d78-46da-9965-622ca1b27127-kube-api-access-ds2mx\") on node \"crc\" DevicePath \"\"" Dec 05 12:18:10 crc kubenswrapper[4728]: I1205 12:18:10.693377 4728 scope.go:117] "RemoveContainer" containerID="96c57877647397eaff1af7f0a708785209b3e64379972213997b0085f10ce630" Dec 05 12:18:10 crc kubenswrapper[4728]: E1205 12:18:10.693980 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"96c57877647397eaff1af7f0a708785209b3e64379972213997b0085f10ce630\": container with ID starting with 96c57877647397eaff1af7f0a708785209b3e64379972213997b0085f10ce630 not found: ID does not exist" containerID="96c57877647397eaff1af7f0a708785209b3e64379972213997b0085f10ce630" Dec 05 12:18:10 crc kubenswrapper[4728]: I1205 12:18:10.694019 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"96c57877647397eaff1af7f0a708785209b3e64379972213997b0085f10ce630"} err="failed to get container status \"96c57877647397eaff1af7f0a708785209b3e64379972213997b0085f10ce630\": rpc error: code = NotFound desc = could not find container \"96c57877647397eaff1af7f0a708785209b3e64379972213997b0085f10ce630\": container with ID starting with 96c57877647397eaff1af7f0a708785209b3e64379972213997b0085f10ce630 not found: ID does not exist" Dec 05 12:18:10 crc kubenswrapper[4728]: I1205 12:18:10.694045 4728 scope.go:117] "RemoveContainer" containerID="2550daf11415e1d20aaf9498dee997371bcc446192702a868df453abc6fa0e81" Dec 05 12:18:10 crc kubenswrapper[4728]: E1205 12:18:10.694389 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2550daf11415e1d20aaf9498dee997371bcc446192702a868df453abc6fa0e81\": container with ID starting with 2550daf11415e1d20aaf9498dee997371bcc446192702a868df453abc6fa0e81 not found: ID does not exist" containerID="2550daf11415e1d20aaf9498dee997371bcc446192702a868df453abc6fa0e81" Dec 05 12:18:10 crc kubenswrapper[4728]: I1205 12:18:10.694428 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2550daf11415e1d20aaf9498dee997371bcc446192702a868df453abc6fa0e81"} err="failed to get container status \"2550daf11415e1d20aaf9498dee997371bcc446192702a868df453abc6fa0e81\": rpc error: code = NotFound desc = could not find container \"2550daf11415e1d20aaf9498dee997371bcc446192702a868df453abc6fa0e81\": container with ID starting with 2550daf11415e1d20aaf9498dee997371bcc446192702a868df453abc6fa0e81 not found: ID does not exist" Dec 05 12:18:10 crc kubenswrapper[4728]: I1205 12:18:10.694460 4728 scope.go:117] "RemoveContainer" containerID="5edd8aa6f610b32307eb041e855c0f8d8b319f1e387b624ada78cfcdc3989596" Dec 05 12:18:10 crc kubenswrapper[4728]: E1205 12:18:10.694827 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5edd8aa6f610b32307eb041e855c0f8d8b319f1e387b624ada78cfcdc3989596\": container with ID starting with 5edd8aa6f610b32307eb041e855c0f8d8b319f1e387b624ada78cfcdc3989596 not found: ID does not 
exist" containerID="5edd8aa6f610b32307eb041e855c0f8d8b319f1e387b624ada78cfcdc3989596" Dec 05 12:18:10 crc kubenswrapper[4728]: I1205 12:18:10.694858 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5edd8aa6f610b32307eb041e855c0f8d8b319f1e387b624ada78cfcdc3989596"} err="failed to get container status \"5edd8aa6f610b32307eb041e855c0f8d8b319f1e387b624ada78cfcdc3989596\": rpc error: code = NotFound desc = could not find container \"5edd8aa6f610b32307eb041e855c0f8d8b319f1e387b624ada78cfcdc3989596\": container with ID starting with 5edd8aa6f610b32307eb041e855c0f8d8b319f1e387b624ada78cfcdc3989596 not found: ID does not exist" Dec 05 12:18:10 crc kubenswrapper[4728]: I1205 12:18:10.704423 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-xnfts"] Dec 05 12:18:10 crc kubenswrapper[4728]: I1205 12:18:10.717816 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-xnfts"] Dec 05 12:18:12 crc kubenswrapper[4728]: I1205 12:18:12.363013 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="06c1c505-6d78-46da-9965-622ca1b27127" path="/var/lib/kubelet/pods/06c1c505-6d78-46da-9965-622ca1b27127/volumes" Dec 05 12:18:55 crc kubenswrapper[4728]: I1205 12:18:55.701693 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:18:55 crc kubenswrapper[4728]: I1205 12:18:55.702243 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:19:25 crc kubenswrapper[4728]: I1205 12:19:25.702392 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:19:25 crc kubenswrapper[4728]: I1205 12:19:25.702993 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:19:55 crc kubenswrapper[4728]: I1205 12:19:55.701871 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:19:55 crc kubenswrapper[4728]: I1205 12:19:55.702450 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:19:55 crc kubenswrapper[4728]: 
Dec 05 12:20:58 crc kubenswrapper[4728]: I1205 12:20:58.880872 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-rvjmx"]
Dec 05 12:20:58 crc kubenswrapper[4728]: E1205 12:20:58.882617 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06c1c505-6d78-46da-9965-622ca1b27127" containerName="extract-utilities"
Dec 05 12:20:58 crc kubenswrapper[4728]: I1205 12:20:58.882637 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="06c1c505-6d78-46da-9965-622ca1b27127" containerName="extract-utilities"
Dec 05 12:20:58 crc kubenswrapper[4728]: E1205 12:20:58.882685 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06c1c505-6d78-46da-9965-622ca1b27127" containerName="registry-server"
Dec 05 12:20:58 crc kubenswrapper[4728]: I1205 12:20:58.882692 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="06c1c505-6d78-46da-9965-622ca1b27127" containerName="registry-server"
Dec 05 12:20:58 crc kubenswrapper[4728]: E1205 12:20:58.882704 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06c1c505-6d78-46da-9965-622ca1b27127" containerName="extract-content"
Dec 05 12:20:58 crc kubenswrapper[4728]: I1205 12:20:58.882712 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="06c1c505-6d78-46da-9965-622ca1b27127" containerName="extract-content"
Dec 05 12:20:58 crc kubenswrapper[4728]: I1205 12:20:58.882972 4728
memory_manager.go:354] "RemoveStaleState removing state" podUID="06c1c505-6d78-46da-9965-622ca1b27127" containerName="registry-server" Dec 05 12:20:58 crc kubenswrapper[4728]: I1205 12:20:58.888024 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rvjmx" Dec 05 12:20:58 crc kubenswrapper[4728]: I1205 12:20:58.914270 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rvjmx"] Dec 05 12:20:58 crc kubenswrapper[4728]: I1205 12:20:58.993099 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wbm56\" (UniqueName: \"kubernetes.io/projected/e1628372-2731-4902-9e9c-3f7d8d25cbd8-kube-api-access-wbm56\") pod \"community-operators-rvjmx\" (UID: \"e1628372-2731-4902-9e9c-3f7d8d25cbd8\") " pod="openshift-marketplace/community-operators-rvjmx" Dec 05 12:20:58 crc kubenswrapper[4728]: I1205 12:20:58.993408 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1628372-2731-4902-9e9c-3f7d8d25cbd8-catalog-content\") pod \"community-operators-rvjmx\" (UID: \"e1628372-2731-4902-9e9c-3f7d8d25cbd8\") " pod="openshift-marketplace/community-operators-rvjmx" Dec 05 12:20:58 crc kubenswrapper[4728]: I1205 12:20:58.993542 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1628372-2731-4902-9e9c-3f7d8d25cbd8-utilities\") pod \"community-operators-rvjmx\" (UID: \"e1628372-2731-4902-9e9c-3f7d8d25cbd8\") " pod="openshift-marketplace/community-operators-rvjmx" Dec 05 12:20:59 crc kubenswrapper[4728]: I1205 12:20:59.095172 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wbm56\" (UniqueName: \"kubernetes.io/projected/e1628372-2731-4902-9e9c-3f7d8d25cbd8-kube-api-access-wbm56\") pod \"community-operators-rvjmx\" (UID: \"e1628372-2731-4902-9e9c-3f7d8d25cbd8\") " pod="openshift-marketplace/community-operators-rvjmx" Dec 05 12:20:59 crc kubenswrapper[4728]: I1205 12:20:59.095238 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1628372-2731-4902-9e9c-3f7d8d25cbd8-catalog-content\") pod \"community-operators-rvjmx\" (UID: \"e1628372-2731-4902-9e9c-3f7d8d25cbd8\") " pod="openshift-marketplace/community-operators-rvjmx" Dec 05 12:20:59 crc kubenswrapper[4728]: I1205 12:20:59.095265 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1628372-2731-4902-9e9c-3f7d8d25cbd8-utilities\") pod \"community-operators-rvjmx\" (UID: \"e1628372-2731-4902-9e9c-3f7d8d25cbd8\") " pod="openshift-marketplace/community-operators-rvjmx" Dec 05 12:20:59 crc kubenswrapper[4728]: I1205 12:20:59.095835 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1628372-2731-4902-9e9c-3f7d8d25cbd8-utilities\") pod \"community-operators-rvjmx\" (UID: \"e1628372-2731-4902-9e9c-3f7d8d25cbd8\") " pod="openshift-marketplace/community-operators-rvjmx" Dec 05 12:20:59 crc kubenswrapper[4728]: I1205 12:20:59.095947 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/e1628372-2731-4902-9e9c-3f7d8d25cbd8-catalog-content\") pod \"community-operators-rvjmx\" (UID: \"e1628372-2731-4902-9e9c-3f7d8d25cbd8\") " pod="openshift-marketplace/community-operators-rvjmx" Dec 05 12:20:59 crc kubenswrapper[4728]: I1205 12:20:59.115213 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wbm56\" (UniqueName: \"kubernetes.io/projected/e1628372-2731-4902-9e9c-3f7d8d25cbd8-kube-api-access-wbm56\") pod \"community-operators-rvjmx\" (UID: \"e1628372-2731-4902-9e9c-3f7d8d25cbd8\") " pod="openshift-marketplace/community-operators-rvjmx" Dec 05 12:20:59 crc kubenswrapper[4728]: I1205 12:20:59.222006 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rvjmx" Dec 05 12:20:59 crc kubenswrapper[4728]: I1205 12:20:59.991437 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rvjmx"] Dec 05 12:21:00 crc kubenswrapper[4728]: I1205 12:21:00.841229 4728 generic.go:334] "Generic (PLEG): container finished" podID="e1628372-2731-4902-9e9c-3f7d8d25cbd8" containerID="59db0420ba7b7916f59958f983c27ccb53d56575c76f14c560cbcc22da5d6a08" exitCode=0 Dec 05 12:21:00 crc kubenswrapper[4728]: I1205 12:21:00.843473 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rvjmx" event={"ID":"e1628372-2731-4902-9e9c-3f7d8d25cbd8","Type":"ContainerDied","Data":"59db0420ba7b7916f59958f983c27ccb53d56575c76f14c560cbcc22da5d6a08"} Dec 05 12:21:00 crc kubenswrapper[4728]: I1205 12:21:00.843505 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rvjmx" event={"ID":"e1628372-2731-4902-9e9c-3f7d8d25cbd8","Type":"ContainerStarted","Data":"d16b435f9c25a3157f41ac7ab2a52e6ff7bdf20384993e6efb435ed39c303447"} Dec 05 12:21:00 crc kubenswrapper[4728]: I1205 12:21:00.844851 4728 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 12:21:01 crc kubenswrapper[4728]: I1205 12:21:01.852666 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rvjmx" event={"ID":"e1628372-2731-4902-9e9c-3f7d8d25cbd8","Type":"ContainerStarted","Data":"7078fbb1a5ca94748e8d38c9844a56fa63e389a3749517465ea03f429bcd8b25"} Dec 05 12:21:02 crc kubenswrapper[4728]: I1205 12:21:02.889862 4728 generic.go:334] "Generic (PLEG): container finished" podID="e1628372-2731-4902-9e9c-3f7d8d25cbd8" containerID="7078fbb1a5ca94748e8d38c9844a56fa63e389a3749517465ea03f429bcd8b25" exitCode=0 Dec 05 12:21:02 crc kubenswrapper[4728]: I1205 12:21:02.890009 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rvjmx" event={"ID":"e1628372-2731-4902-9e9c-3f7d8d25cbd8","Type":"ContainerDied","Data":"7078fbb1a5ca94748e8d38c9844a56fa63e389a3749517465ea03f429bcd8b25"} Dec 05 12:21:03 crc kubenswrapper[4728]: I1205 12:21:03.903565 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rvjmx" event={"ID":"e1628372-2731-4902-9e9c-3f7d8d25cbd8","Type":"ContainerStarted","Data":"e554457ed4b4fec9d601dc1b347ab95a83730c5431d7f78bfe8e38944b341bc9"} Dec 05 12:21:03 crc kubenswrapper[4728]: I1205 12:21:03.932157 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-rvjmx" podStartSLOduration=3.480463693 podStartE2EDuration="5.932132241s" 
podCreationTimestamp="2025-12-05 12:20:58 +0000 UTC" firstStartedPulling="2025-12-05 12:21:00.844490501 +0000 UTC m=+4394.986613194" lastFinishedPulling="2025-12-05 12:21:03.296159049 +0000 UTC m=+4397.438281742" observedRunningTime="2025-12-05 12:21:03.923667156 +0000 UTC m=+4398.065789849" watchObservedRunningTime="2025-12-05 12:21:03.932132241 +0000 UTC m=+4398.074254954" Dec 05 12:21:09 crc kubenswrapper[4728]: I1205 12:21:09.223371 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-rvjmx" Dec 05 12:21:09 crc kubenswrapper[4728]: I1205 12:21:09.224640 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-rvjmx" Dec 05 12:21:09 crc kubenswrapper[4728]: I1205 12:21:09.279325 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-rvjmx" Dec 05 12:21:10 crc kubenswrapper[4728]: I1205 12:21:10.148825 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-rvjmx" Dec 05 12:21:17 crc kubenswrapper[4728]: I1205 12:21:17.431016 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rvjmx"] Dec 05 12:21:17 crc kubenswrapper[4728]: I1205 12:21:17.431750 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-rvjmx" podUID="e1628372-2731-4902-9e9c-3f7d8d25cbd8" containerName="registry-server" containerID="cri-o://e554457ed4b4fec9d601dc1b347ab95a83730c5431d7f78bfe8e38944b341bc9" gracePeriod=2 Dec 05 12:21:18 crc kubenswrapper[4728]: I1205 12:21:18.046102 4728 generic.go:334] "Generic (PLEG): container finished" podID="e1628372-2731-4902-9e9c-3f7d8d25cbd8" containerID="e554457ed4b4fec9d601dc1b347ab95a83730c5431d7f78bfe8e38944b341bc9" exitCode=0 Dec 05 12:21:18 crc kubenswrapper[4728]: I1205 12:21:18.046164 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rvjmx" event={"ID":"e1628372-2731-4902-9e9c-3f7d8d25cbd8","Type":"ContainerDied","Data":"e554457ed4b4fec9d601dc1b347ab95a83730c5431d7f78bfe8e38944b341bc9"} Dec 05 12:21:18 crc kubenswrapper[4728]: I1205 12:21:18.626018 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-rvjmx" Dec 05 12:21:18 crc kubenswrapper[4728]: I1205 12:21:18.820828 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wbm56\" (UniqueName: \"kubernetes.io/projected/e1628372-2731-4902-9e9c-3f7d8d25cbd8-kube-api-access-wbm56\") pod \"e1628372-2731-4902-9e9c-3f7d8d25cbd8\" (UID: \"e1628372-2731-4902-9e9c-3f7d8d25cbd8\") " Dec 05 12:21:18 crc kubenswrapper[4728]: I1205 12:21:18.820929 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1628372-2731-4902-9e9c-3f7d8d25cbd8-catalog-content\") pod \"e1628372-2731-4902-9e9c-3f7d8d25cbd8\" (UID: \"e1628372-2731-4902-9e9c-3f7d8d25cbd8\") " Dec 05 12:21:18 crc kubenswrapper[4728]: I1205 12:21:18.821012 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1628372-2731-4902-9e9c-3f7d8d25cbd8-utilities\") pod \"e1628372-2731-4902-9e9c-3f7d8d25cbd8\" (UID: \"e1628372-2731-4902-9e9c-3f7d8d25cbd8\") " Dec 05 12:21:18 crc kubenswrapper[4728]: I1205 12:21:18.821923 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e1628372-2731-4902-9e9c-3f7d8d25cbd8-utilities" (OuterVolumeSpecName: "utilities") pod "e1628372-2731-4902-9e9c-3f7d8d25cbd8" (UID: "e1628372-2731-4902-9e9c-3f7d8d25cbd8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:21:18 crc kubenswrapper[4728]: I1205 12:21:18.826153 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1628372-2731-4902-9e9c-3f7d8d25cbd8-kube-api-access-wbm56" (OuterVolumeSpecName: "kube-api-access-wbm56") pod "e1628372-2731-4902-9e9c-3f7d8d25cbd8" (UID: "e1628372-2731-4902-9e9c-3f7d8d25cbd8"). InnerVolumeSpecName "kube-api-access-wbm56". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:21:18 crc kubenswrapper[4728]: I1205 12:21:18.871073 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e1628372-2731-4902-9e9c-3f7d8d25cbd8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e1628372-2731-4902-9e9c-3f7d8d25cbd8" (UID: "e1628372-2731-4902-9e9c-3f7d8d25cbd8"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:21:18 crc kubenswrapper[4728]: I1205 12:21:18.923536 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wbm56\" (UniqueName: \"kubernetes.io/projected/e1628372-2731-4902-9e9c-3f7d8d25cbd8-kube-api-access-wbm56\") on node \"crc\" DevicePath \"\"" Dec 05 12:21:18 crc kubenswrapper[4728]: I1205 12:21:18.923571 4728 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1628372-2731-4902-9e9c-3f7d8d25cbd8-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 12:21:18 crc kubenswrapper[4728]: I1205 12:21:18.923604 4728 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1628372-2731-4902-9e9c-3f7d8d25cbd8-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 12:21:19 crc kubenswrapper[4728]: I1205 12:21:19.057011 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rvjmx" event={"ID":"e1628372-2731-4902-9e9c-3f7d8d25cbd8","Type":"ContainerDied","Data":"d16b435f9c25a3157f41ac7ab2a52e6ff7bdf20384993e6efb435ed39c303447"} Dec 05 12:21:19 crc kubenswrapper[4728]: I1205 12:21:19.057294 4728 scope.go:117] "RemoveContainer" containerID="e554457ed4b4fec9d601dc1b347ab95a83730c5431d7f78bfe8e38944b341bc9" Dec 05 12:21:19 crc kubenswrapper[4728]: I1205 12:21:19.057087 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rvjmx" Dec 05 12:21:19 crc kubenswrapper[4728]: I1205 12:21:19.102379 4728 scope.go:117] "RemoveContainer" containerID="7078fbb1a5ca94748e8d38c9844a56fa63e389a3749517465ea03f429bcd8b25" Dec 05 12:21:19 crc kubenswrapper[4728]: I1205 12:21:19.105072 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rvjmx"] Dec 05 12:21:19 crc kubenswrapper[4728]: I1205 12:21:19.113375 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-rvjmx"] Dec 05 12:21:19 crc kubenswrapper[4728]: I1205 12:21:19.714738 4728 scope.go:117] "RemoveContainer" containerID="59db0420ba7b7916f59958f983c27ccb53d56575c76f14c560cbcc22da5d6a08" Dec 05 12:21:20 crc kubenswrapper[4728]: I1205 12:21:20.375914 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e1628372-2731-4902-9e9c-3f7d8d25cbd8" path="/var/lib/kubelet/pods/e1628372-2731-4902-9e9c-3f7d8d25cbd8/volumes" Dec 05 12:22:25 crc kubenswrapper[4728]: I1205 12:22:25.701889 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:22:25 crc kubenswrapper[4728]: I1205 12:22:25.702402 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:22:43 crc kubenswrapper[4728]: I1205 12:22:43.612723 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-zt5bc"] Dec 05 12:22:43 crc kubenswrapper[4728]: E1205 12:22:43.613811 4728 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="e1628372-2731-4902-9e9c-3f7d8d25cbd8" containerName="extract-content" Dec 05 12:22:43 crc kubenswrapper[4728]: I1205 12:22:43.613830 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1628372-2731-4902-9e9c-3f7d8d25cbd8" containerName="extract-content" Dec 05 12:22:43 crc kubenswrapper[4728]: E1205 12:22:43.613872 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1628372-2731-4902-9e9c-3f7d8d25cbd8" containerName="registry-server" Dec 05 12:22:43 crc kubenswrapper[4728]: I1205 12:22:43.613882 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1628372-2731-4902-9e9c-3f7d8d25cbd8" containerName="registry-server" Dec 05 12:22:43 crc kubenswrapper[4728]: E1205 12:22:43.613896 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1628372-2731-4902-9e9c-3f7d8d25cbd8" containerName="extract-utilities" Dec 05 12:22:43 crc kubenswrapper[4728]: I1205 12:22:43.613903 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1628372-2731-4902-9e9c-3f7d8d25cbd8" containerName="extract-utilities" Dec 05 12:22:43 crc kubenswrapper[4728]: I1205 12:22:43.614177 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1628372-2731-4902-9e9c-3f7d8d25cbd8" containerName="registry-server" Dec 05 12:22:43 crc kubenswrapper[4728]: I1205 12:22:43.616588 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zt5bc" Dec 05 12:22:43 crc kubenswrapper[4728]: I1205 12:22:43.626694 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zt5bc"] Dec 05 12:22:43 crc kubenswrapper[4728]: I1205 12:22:43.661334 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8daeea28-d953-42b9-8267-e51ecd3a5fbe-utilities\") pod \"certified-operators-zt5bc\" (UID: \"8daeea28-d953-42b9-8267-e51ecd3a5fbe\") " pod="openshift-marketplace/certified-operators-zt5bc" Dec 05 12:22:43 crc kubenswrapper[4728]: I1205 12:22:43.661776 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-48288\" (UniqueName: \"kubernetes.io/projected/8daeea28-d953-42b9-8267-e51ecd3a5fbe-kube-api-access-48288\") pod \"certified-operators-zt5bc\" (UID: \"8daeea28-d953-42b9-8267-e51ecd3a5fbe\") " pod="openshift-marketplace/certified-operators-zt5bc" Dec 05 12:22:43 crc kubenswrapper[4728]: I1205 12:22:43.662352 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8daeea28-d953-42b9-8267-e51ecd3a5fbe-catalog-content\") pod \"certified-operators-zt5bc\" (UID: \"8daeea28-d953-42b9-8267-e51ecd3a5fbe\") " pod="openshift-marketplace/certified-operators-zt5bc" Dec 05 12:22:43 crc kubenswrapper[4728]: I1205 12:22:43.764324 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8daeea28-d953-42b9-8267-e51ecd3a5fbe-catalog-content\") pod \"certified-operators-zt5bc\" (UID: \"8daeea28-d953-42b9-8267-e51ecd3a5fbe\") " pod="openshift-marketplace/certified-operators-zt5bc" Dec 05 12:22:43 crc kubenswrapper[4728]: I1205 12:22:43.764378 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8daeea28-d953-42b9-8267-e51ecd3a5fbe-utilities\") pod 
\"certified-operators-zt5bc\" (UID: \"8daeea28-d953-42b9-8267-e51ecd3a5fbe\") " pod="openshift-marketplace/certified-operators-zt5bc" Dec 05 12:22:43 crc kubenswrapper[4728]: I1205 12:22:43.764433 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-48288\" (UniqueName: \"kubernetes.io/projected/8daeea28-d953-42b9-8267-e51ecd3a5fbe-kube-api-access-48288\") pod \"certified-operators-zt5bc\" (UID: \"8daeea28-d953-42b9-8267-e51ecd3a5fbe\") " pod="openshift-marketplace/certified-operators-zt5bc" Dec 05 12:22:43 crc kubenswrapper[4728]: I1205 12:22:43.765274 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8daeea28-d953-42b9-8267-e51ecd3a5fbe-catalog-content\") pod \"certified-operators-zt5bc\" (UID: \"8daeea28-d953-42b9-8267-e51ecd3a5fbe\") " pod="openshift-marketplace/certified-operators-zt5bc" Dec 05 12:22:43 crc kubenswrapper[4728]: I1205 12:22:43.765484 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8daeea28-d953-42b9-8267-e51ecd3a5fbe-utilities\") pod \"certified-operators-zt5bc\" (UID: \"8daeea28-d953-42b9-8267-e51ecd3a5fbe\") " pod="openshift-marketplace/certified-operators-zt5bc" Dec 05 12:22:43 crc kubenswrapper[4728]: I1205 12:22:43.795708 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-48288\" (UniqueName: \"kubernetes.io/projected/8daeea28-d953-42b9-8267-e51ecd3a5fbe-kube-api-access-48288\") pod \"certified-operators-zt5bc\" (UID: \"8daeea28-d953-42b9-8267-e51ecd3a5fbe\") " pod="openshift-marketplace/certified-operators-zt5bc" Dec 05 12:22:43 crc kubenswrapper[4728]: I1205 12:22:43.947778 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-zt5bc" Dec 05 12:22:44 crc kubenswrapper[4728]: I1205 12:22:44.479494 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zt5bc"] Dec 05 12:22:44 crc kubenswrapper[4728]: I1205 12:22:44.833584 4728 generic.go:334] "Generic (PLEG): container finished" podID="8daeea28-d953-42b9-8267-e51ecd3a5fbe" containerID="ed509ca2af93f99a569c85736fec7cb99fde9fb95570b9f9551dae596b7da38e" exitCode=0 Dec 05 12:22:44 crc kubenswrapper[4728]: I1205 12:22:44.833653 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zt5bc" event={"ID":"8daeea28-d953-42b9-8267-e51ecd3a5fbe","Type":"ContainerDied","Data":"ed509ca2af93f99a569c85736fec7cb99fde9fb95570b9f9551dae596b7da38e"} Dec 05 12:22:44 crc kubenswrapper[4728]: I1205 12:22:44.833893 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zt5bc" event={"ID":"8daeea28-d953-42b9-8267-e51ecd3a5fbe","Type":"ContainerStarted","Data":"f37be7b674a5308b62282e2cf1301745695477fb1be388cf648af88c95009063"} Dec 05 12:22:45 crc kubenswrapper[4728]: I1205 12:22:45.846236 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zt5bc" event={"ID":"8daeea28-d953-42b9-8267-e51ecd3a5fbe","Type":"ContainerStarted","Data":"62a790349d2ed9c957c6b193cee62f9934c8ef5f0164d4114e99a1b7116544d8"} Dec 05 12:22:46 crc kubenswrapper[4728]: I1205 12:22:46.861548 4728 generic.go:334] "Generic (PLEG): container finished" podID="8daeea28-d953-42b9-8267-e51ecd3a5fbe" containerID="62a790349d2ed9c957c6b193cee62f9934c8ef5f0164d4114e99a1b7116544d8" exitCode=0 Dec 05 12:22:46 crc kubenswrapper[4728]: I1205 12:22:46.861689 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zt5bc" event={"ID":"8daeea28-d953-42b9-8267-e51ecd3a5fbe","Type":"ContainerDied","Data":"62a790349d2ed9c957c6b193cee62f9934c8ef5f0164d4114e99a1b7116544d8"} Dec 05 12:22:47 crc kubenswrapper[4728]: I1205 12:22:47.872131 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zt5bc" event={"ID":"8daeea28-d953-42b9-8267-e51ecd3a5fbe","Type":"ContainerStarted","Data":"9e1cc5d1b61a0f3c3a4782439816ca724fd7b500257a5631b95690a93dd2694d"} Dec 05 12:22:47 crc kubenswrapper[4728]: I1205 12:22:47.897574 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-zt5bc" podStartSLOduration=2.4841598080000002 podStartE2EDuration="4.897549907s" podCreationTimestamp="2025-12-05 12:22:43 +0000 UTC" firstStartedPulling="2025-12-05 12:22:44.835305523 +0000 UTC m=+4498.977428216" lastFinishedPulling="2025-12-05 12:22:47.248695622 +0000 UTC m=+4501.390818315" observedRunningTime="2025-12-05 12:22:47.891983419 +0000 UTC m=+4502.034106102" watchObservedRunningTime="2025-12-05 12:22:47.897549907 +0000 UTC m=+4502.039672600" Dec 05 12:22:53 crc kubenswrapper[4728]: I1205 12:22:53.947976 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-zt5bc" Dec 05 12:22:53 crc kubenswrapper[4728]: I1205 12:22:53.948463 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-zt5bc" Dec 05 12:22:54 crc kubenswrapper[4728]: I1205 12:22:54.020041 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/certified-operators-zt5bc" Dec 05 12:22:54 crc kubenswrapper[4728]: I1205 12:22:54.986211 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-zt5bc" Dec 05 12:22:55 crc kubenswrapper[4728]: I1205 12:22:55.064144 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zt5bc"] Dec 05 12:22:55 crc kubenswrapper[4728]: I1205 12:22:55.701623 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:22:55 crc kubenswrapper[4728]: I1205 12:22:55.701960 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:22:56 crc kubenswrapper[4728]: I1205 12:22:56.949446 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-zt5bc" podUID="8daeea28-d953-42b9-8267-e51ecd3a5fbe" containerName="registry-server" containerID="cri-o://9e1cc5d1b61a0f3c3a4782439816ca724fd7b500257a5631b95690a93dd2694d" gracePeriod=2 Dec 05 12:22:57 crc kubenswrapper[4728]: I1205 12:22:57.633243 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zt5bc" Dec 05 12:22:57 crc kubenswrapper[4728]: I1205 12:22:57.666366 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-48288\" (UniqueName: \"kubernetes.io/projected/8daeea28-d953-42b9-8267-e51ecd3a5fbe-kube-api-access-48288\") pod \"8daeea28-d953-42b9-8267-e51ecd3a5fbe\" (UID: \"8daeea28-d953-42b9-8267-e51ecd3a5fbe\") " Dec 05 12:22:57 crc kubenswrapper[4728]: I1205 12:22:57.667813 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8daeea28-d953-42b9-8267-e51ecd3a5fbe-catalog-content\") pod \"8daeea28-d953-42b9-8267-e51ecd3a5fbe\" (UID: \"8daeea28-d953-42b9-8267-e51ecd3a5fbe\") " Dec 05 12:22:57 crc kubenswrapper[4728]: I1205 12:22:57.667911 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8daeea28-d953-42b9-8267-e51ecd3a5fbe-utilities\") pod \"8daeea28-d953-42b9-8267-e51ecd3a5fbe\" (UID: \"8daeea28-d953-42b9-8267-e51ecd3a5fbe\") " Dec 05 12:22:57 crc kubenswrapper[4728]: I1205 12:22:57.668779 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8daeea28-d953-42b9-8267-e51ecd3a5fbe-utilities" (OuterVolumeSpecName: "utilities") pod "8daeea28-d953-42b9-8267-e51ecd3a5fbe" (UID: "8daeea28-d953-42b9-8267-e51ecd3a5fbe"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:22:57 crc kubenswrapper[4728]: I1205 12:22:57.680072 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8daeea28-d953-42b9-8267-e51ecd3a5fbe-kube-api-access-48288" (OuterVolumeSpecName: "kube-api-access-48288") pod "8daeea28-d953-42b9-8267-e51ecd3a5fbe" (UID: "8daeea28-d953-42b9-8267-e51ecd3a5fbe"). InnerVolumeSpecName "kube-api-access-48288". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:22:57 crc kubenswrapper[4728]: I1205 12:22:57.717135 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8daeea28-d953-42b9-8267-e51ecd3a5fbe-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8daeea28-d953-42b9-8267-e51ecd3a5fbe" (UID: "8daeea28-d953-42b9-8267-e51ecd3a5fbe"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:22:57 crc kubenswrapper[4728]: I1205 12:22:57.769963 4728 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8daeea28-d953-42b9-8267-e51ecd3a5fbe-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 12:22:57 crc kubenswrapper[4728]: I1205 12:22:57.769998 4728 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8daeea28-d953-42b9-8267-e51ecd3a5fbe-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 12:22:57 crc kubenswrapper[4728]: I1205 12:22:57.770008 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-48288\" (UniqueName: \"kubernetes.io/projected/8daeea28-d953-42b9-8267-e51ecd3a5fbe-kube-api-access-48288\") on node \"crc\" DevicePath \"\"" Dec 05 12:22:57 crc kubenswrapper[4728]: I1205 12:22:57.960405 4728 generic.go:334] "Generic (PLEG): container finished" podID="8daeea28-d953-42b9-8267-e51ecd3a5fbe" containerID="9e1cc5d1b61a0f3c3a4782439816ca724fd7b500257a5631b95690a93dd2694d" exitCode=0 Dec 05 12:22:57 crc kubenswrapper[4728]: I1205 12:22:57.960456 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-zt5bc" Dec 05 12:22:57 crc kubenswrapper[4728]: I1205 12:22:57.960478 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zt5bc" event={"ID":"8daeea28-d953-42b9-8267-e51ecd3a5fbe","Type":"ContainerDied","Data":"9e1cc5d1b61a0f3c3a4782439816ca724fd7b500257a5631b95690a93dd2694d"} Dec 05 12:22:57 crc kubenswrapper[4728]: I1205 12:22:57.960907 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zt5bc" event={"ID":"8daeea28-d953-42b9-8267-e51ecd3a5fbe","Type":"ContainerDied","Data":"f37be7b674a5308b62282e2cf1301745695477fb1be388cf648af88c95009063"} Dec 05 12:22:57 crc kubenswrapper[4728]: I1205 12:22:57.960931 4728 scope.go:117] "RemoveContainer" containerID="9e1cc5d1b61a0f3c3a4782439816ca724fd7b500257a5631b95690a93dd2694d" Dec 05 12:22:57 crc kubenswrapper[4728]: I1205 12:22:57.983893 4728 scope.go:117] "RemoveContainer" containerID="62a790349d2ed9c957c6b193cee62f9934c8ef5f0164d4114e99a1b7116544d8" Dec 05 12:22:57 crc kubenswrapper[4728]: I1205 12:22:57.998914 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zt5bc"] Dec 05 12:22:58 crc kubenswrapper[4728]: I1205 12:22:58.009706 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-zt5bc"] Dec 05 12:22:58 crc kubenswrapper[4728]: I1205 12:22:58.020570 4728 scope.go:117] "RemoveContainer" containerID="ed509ca2af93f99a569c85736fec7cb99fde9fb95570b9f9551dae596b7da38e" Dec 05 12:22:58 crc kubenswrapper[4728]: I1205 12:22:58.052697 4728 scope.go:117] "RemoveContainer" containerID="9e1cc5d1b61a0f3c3a4782439816ca724fd7b500257a5631b95690a93dd2694d" Dec 05 12:22:58 crc kubenswrapper[4728]: E1205 12:22:58.053199 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9e1cc5d1b61a0f3c3a4782439816ca724fd7b500257a5631b95690a93dd2694d\": container with ID starting with 9e1cc5d1b61a0f3c3a4782439816ca724fd7b500257a5631b95690a93dd2694d not found: ID does not exist" containerID="9e1cc5d1b61a0f3c3a4782439816ca724fd7b500257a5631b95690a93dd2694d" Dec 05 12:22:58 crc kubenswrapper[4728]: I1205 12:22:58.053237 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9e1cc5d1b61a0f3c3a4782439816ca724fd7b500257a5631b95690a93dd2694d"} err="failed to get container status \"9e1cc5d1b61a0f3c3a4782439816ca724fd7b500257a5631b95690a93dd2694d\": rpc error: code = NotFound desc = could not find container \"9e1cc5d1b61a0f3c3a4782439816ca724fd7b500257a5631b95690a93dd2694d\": container with ID starting with 9e1cc5d1b61a0f3c3a4782439816ca724fd7b500257a5631b95690a93dd2694d not found: ID does not exist" Dec 05 12:22:58 crc kubenswrapper[4728]: I1205 12:22:58.053259 4728 scope.go:117] "RemoveContainer" containerID="62a790349d2ed9c957c6b193cee62f9934c8ef5f0164d4114e99a1b7116544d8" Dec 05 12:22:58 crc kubenswrapper[4728]: E1205 12:22:58.053503 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"62a790349d2ed9c957c6b193cee62f9934c8ef5f0164d4114e99a1b7116544d8\": container with ID starting with 62a790349d2ed9c957c6b193cee62f9934c8ef5f0164d4114e99a1b7116544d8 not found: ID does not exist" containerID="62a790349d2ed9c957c6b193cee62f9934c8ef5f0164d4114e99a1b7116544d8" Dec 05 12:22:58 crc kubenswrapper[4728]: I1205 12:22:58.053532 4728 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62a790349d2ed9c957c6b193cee62f9934c8ef5f0164d4114e99a1b7116544d8"} err="failed to get container status \"62a790349d2ed9c957c6b193cee62f9934c8ef5f0164d4114e99a1b7116544d8\": rpc error: code = NotFound desc = could not find container \"62a790349d2ed9c957c6b193cee62f9934c8ef5f0164d4114e99a1b7116544d8\": container with ID starting with 62a790349d2ed9c957c6b193cee62f9934c8ef5f0164d4114e99a1b7116544d8 not found: ID does not exist" Dec 05 12:22:58 crc kubenswrapper[4728]: I1205 12:22:58.053550 4728 scope.go:117] "RemoveContainer" containerID="ed509ca2af93f99a569c85736fec7cb99fde9fb95570b9f9551dae596b7da38e" Dec 05 12:22:58 crc kubenswrapper[4728]: E1205 12:22:58.054499 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ed509ca2af93f99a569c85736fec7cb99fde9fb95570b9f9551dae596b7da38e\": container with ID starting with ed509ca2af93f99a569c85736fec7cb99fde9fb95570b9f9551dae596b7da38e not found: ID does not exist" containerID="ed509ca2af93f99a569c85736fec7cb99fde9fb95570b9f9551dae596b7da38e" Dec 05 12:22:58 crc kubenswrapper[4728]: I1205 12:22:58.054524 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ed509ca2af93f99a569c85736fec7cb99fde9fb95570b9f9551dae596b7da38e"} err="failed to get container status \"ed509ca2af93f99a569c85736fec7cb99fde9fb95570b9f9551dae596b7da38e\": rpc error: code = NotFound desc = could not find container \"ed509ca2af93f99a569c85736fec7cb99fde9fb95570b9f9551dae596b7da38e\": container with ID starting with ed509ca2af93f99a569c85736fec7cb99fde9fb95570b9f9551dae596b7da38e not found: ID does not exist" Dec 05 12:22:58 crc kubenswrapper[4728]: I1205 12:22:58.364641 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8daeea28-d953-42b9-8267-e51ecd3a5fbe" path="/var/lib/kubelet/pods/8daeea28-d953-42b9-8267-e51ecd3a5fbe/volumes" Dec 05 12:23:25 crc kubenswrapper[4728]: I1205 12:23:25.702378 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:23:25 crc kubenswrapper[4728]: I1205 12:23:25.702977 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:23:25 crc kubenswrapper[4728]: I1205 12:23:25.703030 4728 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" Dec 05 12:23:25 crc kubenswrapper[4728]: I1205 12:23:25.703886 4728 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5a692b79ac4be1cd2916070664295474f23e3887e4afde68f6aa8d5465853c63"} pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 12:23:25 crc kubenswrapper[4728]: I1205 12:23:25.703934 4728 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" containerID="cri-o://5a692b79ac4be1cd2916070664295474f23e3887e4afde68f6aa8d5465853c63" gracePeriod=600 Dec 05 12:23:25 crc kubenswrapper[4728]: E1205 12:23:25.831569 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:23:26 crc kubenswrapper[4728]: I1205 12:23:26.227083 4728 generic.go:334] "Generic (PLEG): container finished" podID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerID="5a692b79ac4be1cd2916070664295474f23e3887e4afde68f6aa8d5465853c63" exitCode=0 Dec 05 12:23:26 crc kubenswrapper[4728]: I1205 12:23:26.227144 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" event={"ID":"95bfa60b-fcb6-4519-abc5-c25fea50921d","Type":"ContainerDied","Data":"5a692b79ac4be1cd2916070664295474f23e3887e4afde68f6aa8d5465853c63"} Dec 05 12:23:26 crc kubenswrapper[4728]: I1205 12:23:26.227758 4728 scope.go:117] "RemoveContainer" containerID="6c97ed5fd3cfbc81e5cc23b6a32c390369a932bc3c559ef5212dab8ab1cdeee7" Dec 05 12:23:26 crc kubenswrapper[4728]: I1205 12:23:26.228596 4728 scope.go:117] "RemoveContainer" containerID="5a692b79ac4be1cd2916070664295474f23e3887e4afde68f6aa8d5465853c63" Dec 05 12:23:26 crc kubenswrapper[4728]: E1205 12:23:26.228920 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:23:39 crc kubenswrapper[4728]: I1205 12:23:39.352704 4728 scope.go:117] "RemoveContainer" containerID="5a692b79ac4be1cd2916070664295474f23e3887e4afde68f6aa8d5465853c63" Dec 05 12:23:39 crc kubenswrapper[4728]: E1205 12:23:39.353470 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:23:52 crc kubenswrapper[4728]: I1205 12:23:52.352244 4728 scope.go:117] "RemoveContainer" containerID="5a692b79ac4be1cd2916070664295474f23e3887e4afde68f6aa8d5465853c63" Dec 05 12:23:52 crc kubenswrapper[4728]: E1205 12:23:52.353238 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:24:04 crc 
kubenswrapper[4728]: I1205 12:24:04.353155 4728 scope.go:117] "RemoveContainer" containerID="5a692b79ac4be1cd2916070664295474f23e3887e4afde68f6aa8d5465853c63" Dec 05 12:24:04 crc kubenswrapper[4728]: E1205 12:24:04.354235 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:24:19 crc kubenswrapper[4728]: I1205 12:24:19.352531 4728 scope.go:117] "RemoveContainer" containerID="5a692b79ac4be1cd2916070664295474f23e3887e4afde68f6aa8d5465853c63" Dec 05 12:24:19 crc kubenswrapper[4728]: E1205 12:24:19.353497 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:24:32 crc kubenswrapper[4728]: I1205 12:24:32.352500 4728 scope.go:117] "RemoveContainer" containerID="5a692b79ac4be1cd2916070664295474f23e3887e4afde68f6aa8d5465853c63" Dec 05 12:24:32 crc kubenswrapper[4728]: E1205 12:24:32.353227 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:24:45 crc kubenswrapper[4728]: I1205 12:24:45.352144 4728 scope.go:117] "RemoveContainer" containerID="5a692b79ac4be1cd2916070664295474f23e3887e4afde68f6aa8d5465853c63" Dec 05 12:24:45 crc kubenswrapper[4728]: E1205 12:24:45.352953 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:24:56 crc kubenswrapper[4728]: I1205 12:24:56.364395 4728 scope.go:117] "RemoveContainer" containerID="5a692b79ac4be1cd2916070664295474f23e3887e4afde68f6aa8d5465853c63" Dec 05 12:24:56 crc kubenswrapper[4728]: E1205 12:24:56.365193 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:25:09 crc kubenswrapper[4728]: I1205 12:25:09.352898 4728 scope.go:117] "RemoveContainer" containerID="5a692b79ac4be1cd2916070664295474f23e3887e4afde68f6aa8d5465853c63" Dec 05 12:25:09 crc 
kubenswrapper[4728]: E1205 12:25:09.353732 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d"
Dec 05 12:25:20 crc kubenswrapper[4728]: I1205 12:25:20.352679 4728 scope.go:117] "RemoveContainer" containerID="5a692b79ac4be1cd2916070664295474f23e3887e4afde68f6aa8d5465853c63"
Dec 05 12:25:20 crc kubenswrapper[4728]: E1205 12:25:20.353480 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d"
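[editor's note] The repeating "Error syncing pod, skipping ... CrashLoopBackOff: back-off 5m0s" entries above show restart backoff at its ceiling: each failed restart of machine-config-daemon doubles the wait until it plateaus at five minutes (the 5m0s in the message), after which each retry attempt logged here is simply rejected until the backoff window expires. Below is a sketch of that capped doubling; the 10s initial delay and factor of 2 are assumed stock-kubelet defaults, and only the 5m cap is stated in the log itself.

    // backoff.go - sketch of capped exponential backoff as seen in CrashLoopBackOff.
    // Constants are assumptions matching stock kubelet defaults (10s initial, x2,
    // capped at 5m per the "back-off 5m0s" message in this log); not kubelet source.
    package main

    import (
    	"fmt"
    	"time"
    )

    func backoffSchedule(initial, max time.Duration, steps int) []time.Duration {
    	out := make([]time.Duration, 0, steps)
    	d := initial
    	for i := 0; i < steps; i++ {
    		out = append(out, d)
    		d *= 2
    		if d > max {
    			d = max // plateau: every further restart waits the full cap
    		}
    	}
    	return out
    }

    func main() {
    	// Prints: 10s 20s 40s 1m20s 2m40s 5m0s 5m0s 5m0s
    	for _, d := range backoffSchedule(10*time.Second, 5*time.Minute, 8) {
    		fmt.Printf("%v ", d)
    	}
    	fmt.Println()
    }

The plateau is why the entries above recur indefinitely at the sync interval rather than the container being retried right away.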
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k7729" Dec 05 12:25:22 crc kubenswrapper[4728]: I1205 12:25:22.769865 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-k7729"] Dec 05 12:25:22 crc kubenswrapper[4728]: I1205 12:25:22.831944 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e21f8e0a-5f42-4fa9-a8ff-2d8c195b9966-catalog-content\") pod \"redhat-marketplace-k7729\" (UID: \"e21f8e0a-5f42-4fa9-a8ff-2d8c195b9966\") " pod="openshift-marketplace/redhat-marketplace-k7729" Dec 05 12:25:22 crc kubenswrapper[4728]: I1205 12:25:22.832023 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r8fkl\" (UniqueName: \"kubernetes.io/projected/e21f8e0a-5f42-4fa9-a8ff-2d8c195b9966-kube-api-access-r8fkl\") pod \"redhat-marketplace-k7729\" (UID: \"e21f8e0a-5f42-4fa9-a8ff-2d8c195b9966\") " pod="openshift-marketplace/redhat-marketplace-k7729" Dec 05 12:25:22 crc kubenswrapper[4728]: I1205 12:25:22.832172 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e21f8e0a-5f42-4fa9-a8ff-2d8c195b9966-utilities\") pod \"redhat-marketplace-k7729\" (UID: \"e21f8e0a-5f42-4fa9-a8ff-2d8c195b9966\") " pod="openshift-marketplace/redhat-marketplace-k7729" Dec 05 12:25:22 crc kubenswrapper[4728]: I1205 12:25:22.933575 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e21f8e0a-5f42-4fa9-a8ff-2d8c195b9966-utilities\") pod \"redhat-marketplace-k7729\" (UID: \"e21f8e0a-5f42-4fa9-a8ff-2d8c195b9966\") " pod="openshift-marketplace/redhat-marketplace-k7729" Dec 05 12:25:22 crc kubenswrapper[4728]: I1205 12:25:22.933708 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e21f8e0a-5f42-4fa9-a8ff-2d8c195b9966-catalog-content\") pod \"redhat-marketplace-k7729\" (UID: \"e21f8e0a-5f42-4fa9-a8ff-2d8c195b9966\") " pod="openshift-marketplace/redhat-marketplace-k7729" Dec 05 12:25:22 crc kubenswrapper[4728]: I1205 12:25:22.933743 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r8fkl\" (UniqueName: \"kubernetes.io/projected/e21f8e0a-5f42-4fa9-a8ff-2d8c195b9966-kube-api-access-r8fkl\") pod \"redhat-marketplace-k7729\" (UID: \"e21f8e0a-5f42-4fa9-a8ff-2d8c195b9966\") " pod="openshift-marketplace/redhat-marketplace-k7729" Dec 05 12:25:22 crc kubenswrapper[4728]: I1205 12:25:22.934122 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e21f8e0a-5f42-4fa9-a8ff-2d8c195b9966-catalog-content\") pod \"redhat-marketplace-k7729\" (UID: \"e21f8e0a-5f42-4fa9-a8ff-2d8c195b9966\") " pod="openshift-marketplace/redhat-marketplace-k7729" Dec 05 12:25:22 crc kubenswrapper[4728]: I1205 12:25:22.934274 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e21f8e0a-5f42-4fa9-a8ff-2d8c195b9966-utilities\") pod \"redhat-marketplace-k7729\" (UID: \"e21f8e0a-5f42-4fa9-a8ff-2d8c195b9966\") " pod="openshift-marketplace/redhat-marketplace-k7729" Dec 05 12:25:22 crc kubenswrapper[4728]: I1205 12:25:22.953149 4728 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-r8fkl\" (UniqueName: \"kubernetes.io/projected/e21f8e0a-5f42-4fa9-a8ff-2d8c195b9966-kube-api-access-r8fkl\") pod \"redhat-marketplace-k7729\" (UID: \"e21f8e0a-5f42-4fa9-a8ff-2d8c195b9966\") " pod="openshift-marketplace/redhat-marketplace-k7729" Dec 05 12:25:23 crc kubenswrapper[4728]: I1205 12:25:23.089862 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k7729" Dec 05 12:25:23 crc kubenswrapper[4728]: I1205 12:25:23.576725 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-k7729"] Dec 05 12:25:24 crc kubenswrapper[4728]: I1205 12:25:24.335261 4728 generic.go:334] "Generic (PLEG): container finished" podID="e21f8e0a-5f42-4fa9-a8ff-2d8c195b9966" containerID="2336613f24c48988b0167f92af9f9bcc496130c0f52c34c49fed293eab82f886" exitCode=0 Dec 05 12:25:24 crc kubenswrapper[4728]: I1205 12:25:24.335302 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k7729" event={"ID":"e21f8e0a-5f42-4fa9-a8ff-2d8c195b9966","Type":"ContainerDied","Data":"2336613f24c48988b0167f92af9f9bcc496130c0f52c34c49fed293eab82f886"} Dec 05 12:25:24 crc kubenswrapper[4728]: I1205 12:25:24.335329 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k7729" event={"ID":"e21f8e0a-5f42-4fa9-a8ff-2d8c195b9966","Type":"ContainerStarted","Data":"fd03682bc40d7ca442acfae5b2ae0dcc09646f2a13877e61402fd53245d53a8b"} Dec 05 12:25:26 crc kubenswrapper[4728]: I1205 12:25:26.357272 4728 generic.go:334] "Generic (PLEG): container finished" podID="e21f8e0a-5f42-4fa9-a8ff-2d8c195b9966" containerID="d6cddf64fe74c12ff0ec8a007021dd949e1099a8e328501b7f5ae35228b116d3" exitCode=0 Dec 05 12:25:26 crc kubenswrapper[4728]: I1205 12:25:26.363199 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k7729" event={"ID":"e21f8e0a-5f42-4fa9-a8ff-2d8c195b9966","Type":"ContainerDied","Data":"d6cddf64fe74c12ff0ec8a007021dd949e1099a8e328501b7f5ae35228b116d3"} Dec 05 12:25:27 crc kubenswrapper[4728]: I1205 12:25:27.367936 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k7729" event={"ID":"e21f8e0a-5f42-4fa9-a8ff-2d8c195b9966","Type":"ContainerStarted","Data":"32cd4dfab8530b3fc5ee81f8189008f30698f34fd54df8b2d8499ea3df3aae0e"} Dec 05 12:25:27 crc kubenswrapper[4728]: I1205 12:25:27.387222 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-k7729" podStartSLOduration=2.970390263 podStartE2EDuration="5.387206753s" podCreationTimestamp="2025-12-05 12:25:22 +0000 UTC" firstStartedPulling="2025-12-05 12:25:24.337341679 +0000 UTC m=+4658.479464372" lastFinishedPulling="2025-12-05 12:25:26.754158169 +0000 UTC m=+4660.896280862" observedRunningTime="2025-12-05 12:25:27.384230964 +0000 UTC m=+4661.526353687" watchObservedRunningTime="2025-12-05 12:25:27.387206753 +0000 UTC m=+4661.529329446" Dec 05 12:25:31 crc kubenswrapper[4728]: I1205 12:25:31.352159 4728 scope.go:117] "RemoveContainer" containerID="5a692b79ac4be1cd2916070664295474f23e3887e4afde68f6aa8d5465853c63" Dec 05 12:25:31 crc kubenswrapper[4728]: E1205 12:25:31.352953 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:25:33 crc kubenswrapper[4728]: I1205 12:25:33.090529 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-k7729" Dec 05 12:25:33 crc kubenswrapper[4728]: I1205 12:25:33.090846 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-k7729" Dec 05 12:25:33 crc kubenswrapper[4728]: I1205 12:25:33.144898 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-k7729" Dec 05 12:25:33 crc kubenswrapper[4728]: I1205 12:25:33.484971 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-k7729" Dec 05 12:25:33 crc kubenswrapper[4728]: I1205 12:25:33.532591 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-k7729"] Dec 05 12:25:35 crc kubenswrapper[4728]: I1205 12:25:35.450474 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-k7729" podUID="e21f8e0a-5f42-4fa9-a8ff-2d8c195b9966" containerName="registry-server" containerID="cri-o://32cd4dfab8530b3fc5ee81f8189008f30698f34fd54df8b2d8499ea3df3aae0e" gracePeriod=2 Dec 05 12:25:36 crc kubenswrapper[4728]: I1205 12:25:36.034188 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k7729" Dec 05 12:25:36 crc kubenswrapper[4728]: I1205 12:25:36.183819 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e21f8e0a-5f42-4fa9-a8ff-2d8c195b9966-utilities\") pod \"e21f8e0a-5f42-4fa9-a8ff-2d8c195b9966\" (UID: \"e21f8e0a-5f42-4fa9-a8ff-2d8c195b9966\") " Dec 05 12:25:36 crc kubenswrapper[4728]: I1205 12:25:36.184161 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e21f8e0a-5f42-4fa9-a8ff-2d8c195b9966-catalog-content\") pod \"e21f8e0a-5f42-4fa9-a8ff-2d8c195b9966\" (UID: \"e21f8e0a-5f42-4fa9-a8ff-2d8c195b9966\") " Dec 05 12:25:36 crc kubenswrapper[4728]: I1205 12:25:36.184200 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r8fkl\" (UniqueName: \"kubernetes.io/projected/e21f8e0a-5f42-4fa9-a8ff-2d8c195b9966-kube-api-access-r8fkl\") pod \"e21f8e0a-5f42-4fa9-a8ff-2d8c195b9966\" (UID: \"e21f8e0a-5f42-4fa9-a8ff-2d8c195b9966\") " Dec 05 12:25:36 crc kubenswrapper[4728]: I1205 12:25:36.185395 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e21f8e0a-5f42-4fa9-a8ff-2d8c195b9966-utilities" (OuterVolumeSpecName: "utilities") pod "e21f8e0a-5f42-4fa9-a8ff-2d8c195b9966" (UID: "e21f8e0a-5f42-4fa9-a8ff-2d8c195b9966"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:25:36 crc kubenswrapper[4728]: I1205 12:25:36.194148 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e21f8e0a-5f42-4fa9-a8ff-2d8c195b9966-kube-api-access-r8fkl" (OuterVolumeSpecName: "kube-api-access-r8fkl") pod "e21f8e0a-5f42-4fa9-a8ff-2d8c195b9966" (UID: "e21f8e0a-5f42-4fa9-a8ff-2d8c195b9966"). InnerVolumeSpecName "kube-api-access-r8fkl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:25:36 crc kubenswrapper[4728]: I1205 12:25:36.209635 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e21f8e0a-5f42-4fa9-a8ff-2d8c195b9966-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e21f8e0a-5f42-4fa9-a8ff-2d8c195b9966" (UID: "e21f8e0a-5f42-4fa9-a8ff-2d8c195b9966"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:25:36 crc kubenswrapper[4728]: I1205 12:25:36.287421 4728 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e21f8e0a-5f42-4fa9-a8ff-2d8c195b9966-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:36 crc kubenswrapper[4728]: I1205 12:25:36.287466 4728 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e21f8e0a-5f42-4fa9-a8ff-2d8c195b9966-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:36 crc kubenswrapper[4728]: I1205 12:25:36.287481 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r8fkl\" (UniqueName: \"kubernetes.io/projected/e21f8e0a-5f42-4fa9-a8ff-2d8c195b9966-kube-api-access-r8fkl\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:36 crc kubenswrapper[4728]: I1205 12:25:36.462777 4728 generic.go:334] "Generic (PLEG): container finished" podID="e21f8e0a-5f42-4fa9-a8ff-2d8c195b9966" containerID="32cd4dfab8530b3fc5ee81f8189008f30698f34fd54df8b2d8499ea3df3aae0e" exitCode=0 Dec 05 12:25:36 crc kubenswrapper[4728]: I1205 12:25:36.462841 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k7729" event={"ID":"e21f8e0a-5f42-4fa9-a8ff-2d8c195b9966","Type":"ContainerDied","Data":"32cd4dfab8530b3fc5ee81f8189008f30698f34fd54df8b2d8499ea3df3aae0e"} Dec 05 12:25:36 crc kubenswrapper[4728]: I1205 12:25:36.462874 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k7729" event={"ID":"e21f8e0a-5f42-4fa9-a8ff-2d8c195b9966","Type":"ContainerDied","Data":"fd03682bc40d7ca442acfae5b2ae0dcc09646f2a13877e61402fd53245d53a8b"} Dec 05 12:25:36 crc kubenswrapper[4728]: I1205 12:25:36.462893 4728 scope.go:117] "RemoveContainer" containerID="32cd4dfab8530b3fc5ee81f8189008f30698f34fd54df8b2d8499ea3df3aae0e" Dec 05 12:25:36 crc kubenswrapper[4728]: I1205 12:25:36.462904 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k7729" Dec 05 12:25:36 crc kubenswrapper[4728]: I1205 12:25:36.494312 4728 scope.go:117] "RemoveContainer" containerID="d6cddf64fe74c12ff0ec8a007021dd949e1099a8e328501b7f5ae35228b116d3" Dec 05 12:25:36 crc kubenswrapper[4728]: I1205 12:25:36.497250 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-k7729"] Dec 05 12:25:36 crc kubenswrapper[4728]: I1205 12:25:36.509431 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-k7729"] Dec 05 12:25:36 crc kubenswrapper[4728]: I1205 12:25:36.517205 4728 scope.go:117] "RemoveContainer" containerID="2336613f24c48988b0167f92af9f9bcc496130c0f52c34c49fed293eab82f886" Dec 05 12:25:36 crc kubenswrapper[4728]: I1205 12:25:36.563073 4728 scope.go:117] "RemoveContainer" containerID="32cd4dfab8530b3fc5ee81f8189008f30698f34fd54df8b2d8499ea3df3aae0e" Dec 05 12:25:36 crc kubenswrapper[4728]: E1205 12:25:36.563631 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"32cd4dfab8530b3fc5ee81f8189008f30698f34fd54df8b2d8499ea3df3aae0e\": container with ID starting with 32cd4dfab8530b3fc5ee81f8189008f30698f34fd54df8b2d8499ea3df3aae0e not found: ID does not exist" containerID="32cd4dfab8530b3fc5ee81f8189008f30698f34fd54df8b2d8499ea3df3aae0e" Dec 05 12:25:36 crc kubenswrapper[4728]: I1205 12:25:36.563689 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"32cd4dfab8530b3fc5ee81f8189008f30698f34fd54df8b2d8499ea3df3aae0e"} err="failed to get container status \"32cd4dfab8530b3fc5ee81f8189008f30698f34fd54df8b2d8499ea3df3aae0e\": rpc error: code = NotFound desc = could not find container \"32cd4dfab8530b3fc5ee81f8189008f30698f34fd54df8b2d8499ea3df3aae0e\": container with ID starting with 32cd4dfab8530b3fc5ee81f8189008f30698f34fd54df8b2d8499ea3df3aae0e not found: ID does not exist" Dec 05 12:25:36 crc kubenswrapper[4728]: I1205 12:25:36.563722 4728 scope.go:117] "RemoveContainer" containerID="d6cddf64fe74c12ff0ec8a007021dd949e1099a8e328501b7f5ae35228b116d3" Dec 05 12:25:36 crc kubenswrapper[4728]: E1205 12:25:36.564145 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d6cddf64fe74c12ff0ec8a007021dd949e1099a8e328501b7f5ae35228b116d3\": container with ID starting with d6cddf64fe74c12ff0ec8a007021dd949e1099a8e328501b7f5ae35228b116d3 not found: ID does not exist" containerID="d6cddf64fe74c12ff0ec8a007021dd949e1099a8e328501b7f5ae35228b116d3" Dec 05 12:25:36 crc kubenswrapper[4728]: I1205 12:25:36.564172 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d6cddf64fe74c12ff0ec8a007021dd949e1099a8e328501b7f5ae35228b116d3"} err="failed to get container status \"d6cddf64fe74c12ff0ec8a007021dd949e1099a8e328501b7f5ae35228b116d3\": rpc error: code = NotFound desc = could not find container \"d6cddf64fe74c12ff0ec8a007021dd949e1099a8e328501b7f5ae35228b116d3\": container with ID starting with d6cddf64fe74c12ff0ec8a007021dd949e1099a8e328501b7f5ae35228b116d3 not found: ID does not exist" Dec 05 12:25:36 crc kubenswrapper[4728]: I1205 12:25:36.564192 4728 scope.go:117] "RemoveContainer" containerID="2336613f24c48988b0167f92af9f9bcc496130c0f52c34c49fed293eab82f886" Dec 05 12:25:36 crc kubenswrapper[4728]: E1205 12:25:36.564595 4728 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"2336613f24c48988b0167f92af9f9bcc496130c0f52c34c49fed293eab82f886\": container with ID starting with 2336613f24c48988b0167f92af9f9bcc496130c0f52c34c49fed293eab82f886 not found: ID does not exist" containerID="2336613f24c48988b0167f92af9f9bcc496130c0f52c34c49fed293eab82f886" Dec 05 12:25:36 crc kubenswrapper[4728]: I1205 12:25:36.564627 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2336613f24c48988b0167f92af9f9bcc496130c0f52c34c49fed293eab82f886"} err="failed to get container status \"2336613f24c48988b0167f92af9f9bcc496130c0f52c34c49fed293eab82f886\": rpc error: code = NotFound desc = could not find container \"2336613f24c48988b0167f92af9f9bcc496130c0f52c34c49fed293eab82f886\": container with ID starting with 2336613f24c48988b0167f92af9f9bcc496130c0f52c34c49fed293eab82f886 not found: ID does not exist" Dec 05 12:25:38 crc kubenswrapper[4728]: I1205 12:25:38.365773 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e21f8e0a-5f42-4fa9-a8ff-2d8c195b9966" path="/var/lib/kubelet/pods/e21f8e0a-5f42-4fa9-a8ff-2d8c195b9966/volumes" Dec 05 12:25:42 crc kubenswrapper[4728]: I1205 12:25:42.353862 4728 scope.go:117] "RemoveContainer" containerID="5a692b79ac4be1cd2916070664295474f23e3887e4afde68f6aa8d5465853c63" Dec 05 12:25:42 crc kubenswrapper[4728]: E1205 12:25:42.354693 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:25:56 crc kubenswrapper[4728]: I1205 12:25:56.365027 4728 scope.go:117] "RemoveContainer" containerID="5a692b79ac4be1cd2916070664295474f23e3887e4afde68f6aa8d5465853c63" Dec 05 12:25:56 crc kubenswrapper[4728]: E1205 12:25:56.365882 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:26:09 crc kubenswrapper[4728]: I1205 12:26:09.352836 4728 scope.go:117] "RemoveContainer" containerID="5a692b79ac4be1cd2916070664295474f23e3887e4afde68f6aa8d5465853c63" Dec 05 12:26:09 crc kubenswrapper[4728]: E1205 12:26:09.355583 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:26:21 crc kubenswrapper[4728]: I1205 12:26:21.351783 4728 scope.go:117] "RemoveContainer" containerID="5a692b79ac4be1cd2916070664295474f23e3887e4afde68f6aa8d5465853c63" Dec 05 12:26:21 crc kubenswrapper[4728]: E1205 12:26:21.352659 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:26:36 crc kubenswrapper[4728]: I1205 12:26:36.359375 4728 scope.go:117] "RemoveContainer" containerID="5a692b79ac4be1cd2916070664295474f23e3887e4afde68f6aa8d5465853c63" Dec 05 12:26:36 crc kubenswrapper[4728]: E1205 12:26:36.360236 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:26:47 crc kubenswrapper[4728]: I1205 12:26:47.352148 4728 scope.go:117] "RemoveContainer" containerID="5a692b79ac4be1cd2916070664295474f23e3887e4afde68f6aa8d5465853c63" Dec 05 12:26:47 crc kubenswrapper[4728]: E1205 12:26:47.352888 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:27:02 crc kubenswrapper[4728]: I1205 12:27:02.352539 4728 scope.go:117] "RemoveContainer" containerID="5a692b79ac4be1cd2916070664295474f23e3887e4afde68f6aa8d5465853c63" Dec 05 12:27:02 crc kubenswrapper[4728]: E1205 12:27:02.353462 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:27:14 crc kubenswrapper[4728]: I1205 12:27:14.352365 4728 scope.go:117] "RemoveContainer" containerID="5a692b79ac4be1cd2916070664295474f23e3887e4afde68f6aa8d5465853c63" Dec 05 12:27:14 crc kubenswrapper[4728]: E1205 12:27:14.353356 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:27:29 crc kubenswrapper[4728]: I1205 12:27:29.352323 4728 scope.go:117] "RemoveContainer" containerID="5a692b79ac4be1cd2916070664295474f23e3887e4afde68f6aa8d5465853c63" Dec 05 12:27:29 crc kubenswrapper[4728]: E1205 12:27:29.353134 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:27:44 crc kubenswrapper[4728]: I1205 12:27:44.354413 4728 scope.go:117] "RemoveContainer" containerID="5a692b79ac4be1cd2916070664295474f23e3887e4afde68f6aa8d5465853c63" Dec 05 12:27:44 crc kubenswrapper[4728]: E1205 12:27:44.355247 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:27:59 crc kubenswrapper[4728]: I1205 12:27:59.352637 4728 scope.go:117] "RemoveContainer" containerID="5a692b79ac4be1cd2916070664295474f23e3887e4afde68f6aa8d5465853c63" Dec 05 12:27:59 crc kubenswrapper[4728]: E1205 12:27:59.353598 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:28:04 crc kubenswrapper[4728]: I1205 12:28:04.936770 4728 generic.go:334] "Generic (PLEG): container finished" podID="b71aa6bd-22ea-4144-84ea-a241546286a2" containerID="82c1c690c9ef66263b9288ab1da5afd24ef66692660e310f392679fcc9ca1cdb" exitCode=0 Dec 05 12:28:04 crc kubenswrapper[4728]: I1205 12:28:04.936885 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"b71aa6bd-22ea-4144-84ea-a241546286a2","Type":"ContainerDied","Data":"82c1c690c9ef66263b9288ab1da5afd24ef66692660e310f392679fcc9ca1cdb"} Dec 05 12:28:06 crc kubenswrapper[4728]: I1205 12:28:06.593044 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 05 12:28:06 crc kubenswrapper[4728]: I1205 12:28:06.690489 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/b71aa6bd-22ea-4144-84ea-a241546286a2-test-operator-ephemeral-temporary\") pod \"b71aa6bd-22ea-4144-84ea-a241546286a2\" (UID: \"b71aa6bd-22ea-4144-84ea-a241546286a2\") " Dec 05 12:28:06 crc kubenswrapper[4728]: I1205 12:28:06.690556 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/b71aa6bd-22ea-4144-84ea-a241546286a2-openstack-config-secret\") pod \"b71aa6bd-22ea-4144-84ea-a241546286a2\" (UID: \"b71aa6bd-22ea-4144-84ea-a241546286a2\") " Dec 05 12:28:06 crc kubenswrapper[4728]: I1205 12:28:06.690592 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b71aa6bd-22ea-4144-84ea-a241546286a2-ssh-key\") pod \"b71aa6bd-22ea-4144-84ea-a241546286a2\" (UID: \"b71aa6bd-22ea-4144-84ea-a241546286a2\") " Dec 05 12:28:06 crc kubenswrapper[4728]: I1205 12:28:06.690683 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/b71aa6bd-22ea-4144-84ea-a241546286a2-ca-certs\") pod \"b71aa6bd-22ea-4144-84ea-a241546286a2\" (UID: \"b71aa6bd-22ea-4144-84ea-a241546286a2\") " Dec 05 12:28:06 crc kubenswrapper[4728]: I1205 12:28:06.690727 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6f5fb\" (UniqueName: \"kubernetes.io/projected/b71aa6bd-22ea-4144-84ea-a241546286a2-kube-api-access-6f5fb\") pod \"b71aa6bd-22ea-4144-84ea-a241546286a2\" (UID: \"b71aa6bd-22ea-4144-84ea-a241546286a2\") " Dec 05 12:28:06 crc kubenswrapper[4728]: I1205 12:28:06.690753 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"b71aa6bd-22ea-4144-84ea-a241546286a2\" (UID: \"b71aa6bd-22ea-4144-84ea-a241546286a2\") " Dec 05 12:28:06 crc kubenswrapper[4728]: I1205 12:28:06.690838 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/b71aa6bd-22ea-4144-84ea-a241546286a2-test-operator-ephemeral-workdir\") pod \"b71aa6bd-22ea-4144-84ea-a241546286a2\" (UID: \"b71aa6bd-22ea-4144-84ea-a241546286a2\") " Dec 05 12:28:06 crc kubenswrapper[4728]: I1205 12:28:06.690988 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b71aa6bd-22ea-4144-84ea-a241546286a2-config-data\") pod \"b71aa6bd-22ea-4144-84ea-a241546286a2\" (UID: \"b71aa6bd-22ea-4144-84ea-a241546286a2\") " Dec 05 12:28:06 crc kubenswrapper[4728]: I1205 12:28:06.691016 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/b71aa6bd-22ea-4144-84ea-a241546286a2-openstack-config\") pod \"b71aa6bd-22ea-4144-84ea-a241546286a2\" (UID: \"b71aa6bd-22ea-4144-84ea-a241546286a2\") " Dec 05 12:28:06 crc kubenswrapper[4728]: I1205 12:28:06.692389 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b71aa6bd-22ea-4144-84ea-a241546286a2-config-data" (OuterVolumeSpecName: "config-data") pod 
"b71aa6bd-22ea-4144-84ea-a241546286a2" (UID: "b71aa6bd-22ea-4144-84ea-a241546286a2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:28:06 crc kubenswrapper[4728]: I1205 12:28:06.692389 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b71aa6bd-22ea-4144-84ea-a241546286a2-test-operator-ephemeral-temporary" (OuterVolumeSpecName: "test-operator-ephemeral-temporary") pod "b71aa6bd-22ea-4144-84ea-a241546286a2" (UID: "b71aa6bd-22ea-4144-84ea-a241546286a2"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:28:06 crc kubenswrapper[4728]: I1205 12:28:06.692584 4728 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b71aa6bd-22ea-4144-84ea-a241546286a2-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:28:06 crc kubenswrapper[4728]: I1205 12:28:06.692616 4728 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/b71aa6bd-22ea-4144-84ea-a241546286a2-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Dec 05 12:28:06 crc kubenswrapper[4728]: I1205 12:28:06.699846 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b71aa6bd-22ea-4144-84ea-a241546286a2-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "b71aa6bd-22ea-4144-84ea-a241546286a2" (UID: "b71aa6bd-22ea-4144-84ea-a241546286a2"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:28:06 crc kubenswrapper[4728]: I1205 12:28:06.706099 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "test-operator-logs") pod "b71aa6bd-22ea-4144-84ea-a241546286a2" (UID: "b71aa6bd-22ea-4144-84ea-a241546286a2"). InnerVolumeSpecName "local-storage06-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 12:28:06 crc kubenswrapper[4728]: I1205 12:28:06.706927 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b71aa6bd-22ea-4144-84ea-a241546286a2-kube-api-access-6f5fb" (OuterVolumeSpecName: "kube-api-access-6f5fb") pod "b71aa6bd-22ea-4144-84ea-a241546286a2" (UID: "b71aa6bd-22ea-4144-84ea-a241546286a2"). InnerVolumeSpecName "kube-api-access-6f5fb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:28:06 crc kubenswrapper[4728]: I1205 12:28:06.724588 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b71aa6bd-22ea-4144-84ea-a241546286a2-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "b71aa6bd-22ea-4144-84ea-a241546286a2" (UID: "b71aa6bd-22ea-4144-84ea-a241546286a2"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:28:06 crc kubenswrapper[4728]: I1205 12:28:06.726249 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b71aa6bd-22ea-4144-84ea-a241546286a2-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "b71aa6bd-22ea-4144-84ea-a241546286a2" (UID: "b71aa6bd-22ea-4144-84ea-a241546286a2"). InnerVolumeSpecName "ca-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:28:06 crc kubenswrapper[4728]: I1205 12:28:06.728544 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b71aa6bd-22ea-4144-84ea-a241546286a2-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "b71aa6bd-22ea-4144-84ea-a241546286a2" (UID: "b71aa6bd-22ea-4144-84ea-a241546286a2"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:28:06 crc kubenswrapper[4728]: I1205 12:28:06.754087 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b71aa6bd-22ea-4144-84ea-a241546286a2-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "b71aa6bd-22ea-4144-84ea-a241546286a2" (UID: "b71aa6bd-22ea-4144-84ea-a241546286a2"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:28:06 crc kubenswrapper[4728]: I1205 12:28:06.794683 4728 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/b71aa6bd-22ea-4144-84ea-a241546286a2-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Dec 05 12:28:06 crc kubenswrapper[4728]: I1205 12:28:06.794736 4728 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b71aa6bd-22ea-4144-84ea-a241546286a2-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 12:28:06 crc kubenswrapper[4728]: I1205 12:28:06.794748 4728 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/b71aa6bd-22ea-4144-84ea-a241546286a2-ca-certs\") on node \"crc\" DevicePath \"\"" Dec 05 12:28:06 crc kubenswrapper[4728]: I1205 12:28:06.794763 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6f5fb\" (UniqueName: \"kubernetes.io/projected/b71aa6bd-22ea-4144-84ea-a241546286a2-kube-api-access-6f5fb\") on node \"crc\" DevicePath \"\"" Dec 05 12:28:06 crc kubenswrapper[4728]: I1205 12:28:06.795475 4728 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" " Dec 05 12:28:06 crc kubenswrapper[4728]: I1205 12:28:06.795503 4728 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/b71aa6bd-22ea-4144-84ea-a241546286a2-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Dec 05 12:28:06 crc kubenswrapper[4728]: I1205 12:28:06.795520 4728 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/b71aa6bd-22ea-4144-84ea-a241546286a2-openstack-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:28:06 crc kubenswrapper[4728]: I1205 12:28:06.816170 4728 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc" Dec 05 12:28:06 crc kubenswrapper[4728]: I1205 12:28:06.897551 4728 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\"" Dec 05 12:28:06 crc kubenswrapper[4728]: I1205 12:28:06.971275 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" 
event={"ID":"b71aa6bd-22ea-4144-84ea-a241546286a2","Type":"ContainerDied","Data":"909bae2cc483d74ed9ae06d98eb37cbac89332b7ed5239cb8ba080e718b8e78b"} Dec 05 12:28:06 crc kubenswrapper[4728]: I1205 12:28:06.971345 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="909bae2cc483d74ed9ae06d98eb37cbac89332b7ed5239cb8ba080e718b8e78b" Dec 05 12:28:06 crc kubenswrapper[4728]: I1205 12:28:06.971356 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 05 12:28:12 crc kubenswrapper[4728]: I1205 12:28:12.352393 4728 scope.go:117] "RemoveContainer" containerID="5a692b79ac4be1cd2916070664295474f23e3887e4afde68f6aa8d5465853c63" Dec 05 12:28:12 crc kubenswrapper[4728]: E1205 12:28:12.353283 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:28:17 crc kubenswrapper[4728]: I1205 12:28:17.614417 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 05 12:28:17 crc kubenswrapper[4728]: E1205 12:28:17.615603 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e21f8e0a-5f42-4fa9-a8ff-2d8c195b9966" containerName="extract-utilities" Dec 05 12:28:17 crc kubenswrapper[4728]: I1205 12:28:17.615624 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="e21f8e0a-5f42-4fa9-a8ff-2d8c195b9966" containerName="extract-utilities" Dec 05 12:28:17 crc kubenswrapper[4728]: E1205 12:28:17.615645 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e21f8e0a-5f42-4fa9-a8ff-2d8c195b9966" containerName="registry-server" Dec 05 12:28:17 crc kubenswrapper[4728]: I1205 12:28:17.615653 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="e21f8e0a-5f42-4fa9-a8ff-2d8c195b9966" containerName="registry-server" Dec 05 12:28:17 crc kubenswrapper[4728]: E1205 12:28:17.615675 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e21f8e0a-5f42-4fa9-a8ff-2d8c195b9966" containerName="extract-content" Dec 05 12:28:17 crc kubenswrapper[4728]: I1205 12:28:17.615684 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="e21f8e0a-5f42-4fa9-a8ff-2d8c195b9966" containerName="extract-content" Dec 05 12:28:17 crc kubenswrapper[4728]: E1205 12:28:17.615707 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b71aa6bd-22ea-4144-84ea-a241546286a2" containerName="tempest-tests-tempest-tests-runner" Dec 05 12:28:17 crc kubenswrapper[4728]: I1205 12:28:17.615716 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="b71aa6bd-22ea-4144-84ea-a241546286a2" containerName="tempest-tests-tempest-tests-runner" Dec 05 12:28:17 crc kubenswrapper[4728]: I1205 12:28:17.616019 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="b71aa6bd-22ea-4144-84ea-a241546286a2" containerName="tempest-tests-tempest-tests-runner" Dec 05 12:28:17 crc kubenswrapper[4728]: I1205 12:28:17.616055 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="e21f8e0a-5f42-4fa9-a8ff-2d8c195b9966" containerName="registry-server" Dec 05 12:28:17 crc kubenswrapper[4728]: I1205 12:28:17.616974 4728 util.go:30] "No sandbox 
for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 12:28:17 crc kubenswrapper[4728]: I1205 12:28:17.623616 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 05 12:28:17 crc kubenswrapper[4728]: I1205 12:28:17.724998 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xvdsd\" (UniqueName: \"kubernetes.io/projected/cbc422a5-ef18-4b9d-a3a4-c783d200dc25-kube-api-access-xvdsd\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"cbc422a5-ef18-4b9d-a3a4-c783d200dc25\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 12:28:17 crc kubenswrapper[4728]: I1205 12:28:17.725134 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"cbc422a5-ef18-4b9d-a3a4-c783d200dc25\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 12:28:17 crc kubenswrapper[4728]: I1205 12:28:17.827075 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xvdsd\" (UniqueName: \"kubernetes.io/projected/cbc422a5-ef18-4b9d-a3a4-c783d200dc25-kube-api-access-xvdsd\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"cbc422a5-ef18-4b9d-a3a4-c783d200dc25\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 12:28:17 crc kubenswrapper[4728]: I1205 12:28:17.827217 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"cbc422a5-ef18-4b9d-a3a4-c783d200dc25\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 12:28:17 crc kubenswrapper[4728]: I1205 12:28:17.828108 4728 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"cbc422a5-ef18-4b9d-a3a4-c783d200dc25\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 12:28:17 crc kubenswrapper[4728]: I1205 12:28:17.866560 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"cbc422a5-ef18-4b9d-a3a4-c783d200dc25\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 12:28:17 crc kubenswrapper[4728]: I1205 12:28:17.896874 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xvdsd\" (UniqueName: \"kubernetes.io/projected/cbc422a5-ef18-4b9d-a3a4-c783d200dc25-kube-api-access-xvdsd\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"cbc422a5-ef18-4b9d-a3a4-c783d200dc25\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 12:28:17 crc kubenswrapper[4728]: I1205 12:28:17.945132 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 12:28:18 crc kubenswrapper[4728]: I1205 12:28:18.417756 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 05 12:28:18 crc kubenswrapper[4728]: I1205 12:28:18.434291 4728 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 12:28:19 crc kubenswrapper[4728]: I1205 12:28:19.087021 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"cbc422a5-ef18-4b9d-a3a4-c783d200dc25","Type":"ContainerStarted","Data":"a43f44c3439acfd0510e1cd54c14c282c49b73a5b5f0e0a4b370549ea87d1ce6"} Dec 05 12:28:20 crc kubenswrapper[4728]: I1205 12:28:20.097773 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"cbc422a5-ef18-4b9d-a3a4-c783d200dc25","Type":"ContainerStarted","Data":"fddeafabae38fe9ec1a1334a47819d48edd7b23e8f1b2c4aa4ec3cd38bcda198"} Dec 05 12:28:20 crc kubenswrapper[4728]: I1205 12:28:20.112617 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=1.726032912 podStartE2EDuration="3.112594591s" podCreationTimestamp="2025-12-05 12:28:17 +0000 UTC" firstStartedPulling="2025-12-05 12:28:18.434082551 +0000 UTC m=+4832.576205244" lastFinishedPulling="2025-12-05 12:28:19.82064422 +0000 UTC m=+4833.962766923" observedRunningTime="2025-12-05 12:28:20.109657203 +0000 UTC m=+4834.251779896" watchObservedRunningTime="2025-12-05 12:28:20.112594591 +0000 UTC m=+4834.254717314" Dec 05 12:28:23 crc kubenswrapper[4728]: I1205 12:28:23.351993 4728 scope.go:117] "RemoveContainer" containerID="5a692b79ac4be1cd2916070664295474f23e3887e4afde68f6aa8d5465853c63" Dec 05 12:28:23 crc kubenswrapper[4728]: E1205 12:28:23.352680 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:28:34 crc kubenswrapper[4728]: I1205 12:28:34.351970 4728 scope.go:117] "RemoveContainer" containerID="5a692b79ac4be1cd2916070664295474f23e3887e4afde68f6aa8d5465853c63" Dec 05 12:28:35 crc kubenswrapper[4728]: I1205 12:28:35.252550 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" event={"ID":"95bfa60b-fcb6-4519-abc5-c25fea50921d","Type":"ContainerStarted","Data":"a0bc77d6c0813c6450a27dc8cefc7322664efcc392c3072ff03b382186801ae2"} Dec 05 12:28:44 crc kubenswrapper[4728]: I1205 12:28:44.301986 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-m2zwd/must-gather-mjtfz"] Dec 05 12:28:44 crc kubenswrapper[4728]: I1205 12:28:44.304231 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-m2zwd/must-gather-mjtfz" Dec 05 12:28:44 crc kubenswrapper[4728]: I1205 12:28:44.305991 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-m2zwd"/"default-dockercfg-t7dlq" Dec 05 12:28:44 crc kubenswrapper[4728]: I1205 12:28:44.306647 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-m2zwd"/"kube-root-ca.crt" Dec 05 12:28:44 crc kubenswrapper[4728]: I1205 12:28:44.306835 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-m2zwd"/"openshift-service-ca.crt" Dec 05 12:28:44 crc kubenswrapper[4728]: I1205 12:28:44.316603 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-m2zwd/must-gather-mjtfz"] Dec 05 12:28:44 crc kubenswrapper[4728]: I1205 12:28:44.441106 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/6d145479-9865-48c2-9c3a-34b5937af539-must-gather-output\") pod \"must-gather-mjtfz\" (UID: \"6d145479-9865-48c2-9c3a-34b5937af539\") " pod="openshift-must-gather-m2zwd/must-gather-mjtfz" Dec 05 12:28:44 crc kubenswrapper[4728]: I1205 12:28:44.441450 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rnbfm\" (UniqueName: \"kubernetes.io/projected/6d145479-9865-48c2-9c3a-34b5937af539-kube-api-access-rnbfm\") pod \"must-gather-mjtfz\" (UID: \"6d145479-9865-48c2-9c3a-34b5937af539\") " pod="openshift-must-gather-m2zwd/must-gather-mjtfz" Dec 05 12:28:44 crc kubenswrapper[4728]: I1205 12:28:44.543021 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rnbfm\" (UniqueName: \"kubernetes.io/projected/6d145479-9865-48c2-9c3a-34b5937af539-kube-api-access-rnbfm\") pod \"must-gather-mjtfz\" (UID: \"6d145479-9865-48c2-9c3a-34b5937af539\") " pod="openshift-must-gather-m2zwd/must-gather-mjtfz" Dec 05 12:28:44 crc kubenswrapper[4728]: I1205 12:28:44.543080 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/6d145479-9865-48c2-9c3a-34b5937af539-must-gather-output\") pod \"must-gather-mjtfz\" (UID: \"6d145479-9865-48c2-9c3a-34b5937af539\") " pod="openshift-must-gather-m2zwd/must-gather-mjtfz" Dec 05 12:28:44 crc kubenswrapper[4728]: I1205 12:28:44.543464 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/6d145479-9865-48c2-9c3a-34b5937af539-must-gather-output\") pod \"must-gather-mjtfz\" (UID: \"6d145479-9865-48c2-9c3a-34b5937af539\") " pod="openshift-must-gather-m2zwd/must-gather-mjtfz" Dec 05 12:28:44 crc kubenswrapper[4728]: I1205 12:28:44.572635 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rnbfm\" (UniqueName: \"kubernetes.io/projected/6d145479-9865-48c2-9c3a-34b5937af539-kube-api-access-rnbfm\") pod \"must-gather-mjtfz\" (UID: \"6d145479-9865-48c2-9c3a-34b5937af539\") " pod="openshift-must-gather-m2zwd/must-gather-mjtfz" Dec 05 12:28:44 crc kubenswrapper[4728]: I1205 12:28:44.622241 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-m2zwd/must-gather-mjtfz" Dec 05 12:28:45 crc kubenswrapper[4728]: I1205 12:28:45.066566 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-m2zwd/must-gather-mjtfz"] Dec 05 12:28:45 crc kubenswrapper[4728]: I1205 12:28:45.355711 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-m2zwd/must-gather-mjtfz" event={"ID":"6d145479-9865-48c2-9c3a-34b5937af539","Type":"ContainerStarted","Data":"2c0051718ac26d6f3b8c7ba20e9677436c7d749f98931ff0d7e74012cb8189dc"} Dec 05 12:28:50 crc kubenswrapper[4728]: I1205 12:28:50.420089 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-m2zwd/must-gather-mjtfz" event={"ID":"6d145479-9865-48c2-9c3a-34b5937af539","Type":"ContainerStarted","Data":"661ec5d4218aed279ff9c86e3625c61bb58d25ccb574c594676d121467e4c339"} Dec 05 12:28:50 crc kubenswrapper[4728]: I1205 12:28:50.420587 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-m2zwd/must-gather-mjtfz" event={"ID":"6d145479-9865-48c2-9c3a-34b5937af539","Type":"ContainerStarted","Data":"ba274a925c79d5d6ed1356b3fd856199ad16aafc86dac9ba6b6cad4916e99b89"} Dec 05 12:28:50 crc kubenswrapper[4728]: I1205 12:28:50.441908 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-m2zwd/must-gather-mjtfz" podStartSLOduration=2.208742552 podStartE2EDuration="6.441884498s" podCreationTimestamp="2025-12-05 12:28:44 +0000 UTC" firstStartedPulling="2025-12-05 12:28:45.071314499 +0000 UTC m=+4859.213437192" lastFinishedPulling="2025-12-05 12:28:49.304456445 +0000 UTC m=+4863.446579138" observedRunningTime="2025-12-05 12:28:50.438256211 +0000 UTC m=+4864.580378914" watchObservedRunningTime="2025-12-05 12:28:50.441884498 +0000 UTC m=+4864.584007201" Dec 05 12:28:55 crc kubenswrapper[4728]: I1205 12:28:55.342004 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-m2zwd/crc-debug-xjqwv"] Dec 05 12:28:55 crc kubenswrapper[4728]: I1205 12:28:55.347553 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-m2zwd/crc-debug-xjqwv" Dec 05 12:28:55 crc kubenswrapper[4728]: I1205 12:28:55.443714 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fjjxl\" (UniqueName: \"kubernetes.io/projected/10ba31d4-b71d-49ff-a687-683ee165449e-kube-api-access-fjjxl\") pod \"crc-debug-xjqwv\" (UID: \"10ba31d4-b71d-49ff-a687-683ee165449e\") " pod="openshift-must-gather-m2zwd/crc-debug-xjqwv" Dec 05 12:28:55 crc kubenswrapper[4728]: I1205 12:28:55.443937 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/10ba31d4-b71d-49ff-a687-683ee165449e-host\") pod \"crc-debug-xjqwv\" (UID: \"10ba31d4-b71d-49ff-a687-683ee165449e\") " pod="openshift-must-gather-m2zwd/crc-debug-xjqwv" Dec 05 12:28:55 crc kubenswrapper[4728]: I1205 12:28:55.545931 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/10ba31d4-b71d-49ff-a687-683ee165449e-host\") pod \"crc-debug-xjqwv\" (UID: \"10ba31d4-b71d-49ff-a687-683ee165449e\") " pod="openshift-must-gather-m2zwd/crc-debug-xjqwv" Dec 05 12:28:55 crc kubenswrapper[4728]: I1205 12:28:55.546132 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fjjxl\" (UniqueName: \"kubernetes.io/projected/10ba31d4-b71d-49ff-a687-683ee165449e-kube-api-access-fjjxl\") pod \"crc-debug-xjqwv\" (UID: \"10ba31d4-b71d-49ff-a687-683ee165449e\") " pod="openshift-must-gather-m2zwd/crc-debug-xjqwv" Dec 05 12:28:55 crc kubenswrapper[4728]: I1205 12:28:55.546405 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/10ba31d4-b71d-49ff-a687-683ee165449e-host\") pod \"crc-debug-xjqwv\" (UID: \"10ba31d4-b71d-49ff-a687-683ee165449e\") " pod="openshift-must-gather-m2zwd/crc-debug-xjqwv" Dec 05 12:28:55 crc kubenswrapper[4728]: I1205 12:28:55.575203 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fjjxl\" (UniqueName: \"kubernetes.io/projected/10ba31d4-b71d-49ff-a687-683ee165449e-kube-api-access-fjjxl\") pod \"crc-debug-xjqwv\" (UID: \"10ba31d4-b71d-49ff-a687-683ee165449e\") " pod="openshift-must-gather-m2zwd/crc-debug-xjqwv" Dec 05 12:28:55 crc kubenswrapper[4728]: I1205 12:28:55.667991 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-m2zwd/crc-debug-xjqwv" Dec 05 12:28:56 crc kubenswrapper[4728]: I1205 12:28:56.475781 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-m2zwd/crc-debug-xjqwv" event={"ID":"10ba31d4-b71d-49ff-a687-683ee165449e","Type":"ContainerStarted","Data":"85133aa8f0884e491a96ef48f30ee3aa2e15c1f4a19cac8132976893a7afcf2c"} Dec 05 12:29:02 crc kubenswrapper[4728]: I1205 12:29:02.027856 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-w2v47"] Dec 05 12:29:02 crc kubenswrapper[4728]: I1205 12:29:02.030857 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-w2v47" Dec 05 12:29:02 crc kubenswrapper[4728]: I1205 12:29:02.037517 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-w2v47"] Dec 05 12:29:02 crc kubenswrapper[4728]: I1205 12:29:02.171628 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa5787ff-69c1-44da-a0f1-dd0f51635a40-utilities\") pod \"redhat-operators-w2v47\" (UID: \"fa5787ff-69c1-44da-a0f1-dd0f51635a40\") " pod="openshift-marketplace/redhat-operators-w2v47" Dec 05 12:29:02 crc kubenswrapper[4728]: I1205 12:29:02.171714 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cf6z5\" (UniqueName: \"kubernetes.io/projected/fa5787ff-69c1-44da-a0f1-dd0f51635a40-kube-api-access-cf6z5\") pod \"redhat-operators-w2v47\" (UID: \"fa5787ff-69c1-44da-a0f1-dd0f51635a40\") " pod="openshift-marketplace/redhat-operators-w2v47" Dec 05 12:29:02 crc kubenswrapper[4728]: I1205 12:29:02.172053 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa5787ff-69c1-44da-a0f1-dd0f51635a40-catalog-content\") pod \"redhat-operators-w2v47\" (UID: \"fa5787ff-69c1-44da-a0f1-dd0f51635a40\") " pod="openshift-marketplace/redhat-operators-w2v47" Dec 05 12:29:02 crc kubenswrapper[4728]: I1205 12:29:02.275200 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa5787ff-69c1-44da-a0f1-dd0f51635a40-utilities\") pod \"redhat-operators-w2v47\" (UID: \"fa5787ff-69c1-44da-a0f1-dd0f51635a40\") " pod="openshift-marketplace/redhat-operators-w2v47" Dec 05 12:29:02 crc kubenswrapper[4728]: I1205 12:29:02.275364 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cf6z5\" (UniqueName: \"kubernetes.io/projected/fa5787ff-69c1-44da-a0f1-dd0f51635a40-kube-api-access-cf6z5\") pod \"redhat-operators-w2v47\" (UID: \"fa5787ff-69c1-44da-a0f1-dd0f51635a40\") " pod="openshift-marketplace/redhat-operators-w2v47" Dec 05 12:29:02 crc kubenswrapper[4728]: I1205 12:29:02.275677 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa5787ff-69c1-44da-a0f1-dd0f51635a40-catalog-content\") pod \"redhat-operators-w2v47\" (UID: \"fa5787ff-69c1-44da-a0f1-dd0f51635a40\") " pod="openshift-marketplace/redhat-operators-w2v47" Dec 05 12:29:02 crc kubenswrapper[4728]: I1205 12:29:02.275803 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa5787ff-69c1-44da-a0f1-dd0f51635a40-utilities\") pod \"redhat-operators-w2v47\" (UID: \"fa5787ff-69c1-44da-a0f1-dd0f51635a40\") " pod="openshift-marketplace/redhat-operators-w2v47" Dec 05 12:29:02 crc kubenswrapper[4728]: I1205 12:29:02.276208 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa5787ff-69c1-44da-a0f1-dd0f51635a40-catalog-content\") pod \"redhat-operators-w2v47\" (UID: \"fa5787ff-69c1-44da-a0f1-dd0f51635a40\") " pod="openshift-marketplace/redhat-operators-w2v47" Dec 05 12:29:02 crc kubenswrapper[4728]: I1205 12:29:02.304029 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-cf6z5\" (UniqueName: \"kubernetes.io/projected/fa5787ff-69c1-44da-a0f1-dd0f51635a40-kube-api-access-cf6z5\") pod \"redhat-operators-w2v47\" (UID: \"fa5787ff-69c1-44da-a0f1-dd0f51635a40\") " pod="openshift-marketplace/redhat-operators-w2v47" Dec 05 12:29:02 crc kubenswrapper[4728]: I1205 12:29:02.363191 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-w2v47" Dec 05 12:29:06 crc kubenswrapper[4728]: I1205 12:29:06.950321 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-w2v47"] Dec 05 12:29:06 crc kubenswrapper[4728]: W1205 12:29:06.957434 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfa5787ff_69c1_44da_a0f1_dd0f51635a40.slice/crio-adf0dce061c39d741a53f1ca416f7b9003f92b71ae3400999fd6f484867e2638 WatchSource:0}: Error finding container adf0dce061c39d741a53f1ca416f7b9003f92b71ae3400999fd6f484867e2638: Status 404 returned error can't find the container with id adf0dce061c39d741a53f1ca416f7b9003f92b71ae3400999fd6f484867e2638 Dec 05 12:29:07 crc kubenswrapper[4728]: I1205 12:29:07.608548 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w2v47" event={"ID":"fa5787ff-69c1-44da-a0f1-dd0f51635a40","Type":"ContainerStarted","Data":"adf0dce061c39d741a53f1ca416f7b9003f92b71ae3400999fd6f484867e2638"} Dec 05 12:29:08 crc kubenswrapper[4728]: I1205 12:29:08.621125 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-m2zwd/crc-debug-xjqwv" event={"ID":"10ba31d4-b71d-49ff-a687-683ee165449e","Type":"ContainerStarted","Data":"0567d6c7950c6478d5e5593e27ebd874eeb2092f51f55ccf2847d2157b6ed4b6"} Dec 05 12:29:08 crc kubenswrapper[4728]: I1205 12:29:08.625500 4728 generic.go:334] "Generic (PLEG): container finished" podID="fa5787ff-69c1-44da-a0f1-dd0f51635a40" containerID="636edf6a5653043082a0358c435845719760772838c1abaf03a3e7a976230223" exitCode=0 Dec 05 12:29:08 crc kubenswrapper[4728]: I1205 12:29:08.625540 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w2v47" event={"ID":"fa5787ff-69c1-44da-a0f1-dd0f51635a40","Type":"ContainerDied","Data":"636edf6a5653043082a0358c435845719760772838c1abaf03a3e7a976230223"} Dec 05 12:29:08 crc kubenswrapper[4728]: I1205 12:29:08.641066 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-m2zwd/crc-debug-xjqwv" podStartSLOduration=2.772339659 podStartE2EDuration="13.641044504s" podCreationTimestamp="2025-12-05 12:28:55 +0000 UTC" firstStartedPulling="2025-12-05 12:28:55.709879226 +0000 UTC m=+4869.852001919" lastFinishedPulling="2025-12-05 12:29:06.578584071 +0000 UTC m=+4880.720706764" observedRunningTime="2025-12-05 12:29:08.639160964 +0000 UTC m=+4882.781283667" watchObservedRunningTime="2025-12-05 12:29:08.641044504 +0000 UTC m=+4882.783167197" Dec 05 12:29:09 crc kubenswrapper[4728]: I1205 12:29:09.636486 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w2v47" event={"ID":"fa5787ff-69c1-44da-a0f1-dd0f51635a40","Type":"ContainerStarted","Data":"b70decaea2596397e0f282b8bf04728201132d18639bf42b1f3ac5f7d2a3f701"} Dec 05 12:29:12 crc kubenswrapper[4728]: I1205 12:29:12.664588 4728 generic.go:334] "Generic (PLEG): container finished" podID="fa5787ff-69c1-44da-a0f1-dd0f51635a40" 
containerID="b70decaea2596397e0f282b8bf04728201132d18639bf42b1f3ac5f7d2a3f701" exitCode=0 Dec 05 12:29:12 crc kubenswrapper[4728]: I1205 12:29:12.665195 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w2v47" event={"ID":"fa5787ff-69c1-44da-a0f1-dd0f51635a40","Type":"ContainerDied","Data":"b70decaea2596397e0f282b8bf04728201132d18639bf42b1f3ac5f7d2a3f701"} Dec 05 12:29:21 crc kubenswrapper[4728]: I1205 12:29:21.751555 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w2v47" event={"ID":"fa5787ff-69c1-44da-a0f1-dd0f51635a40","Type":"ContainerStarted","Data":"45ba97b00730863bfb2891d42ee2add72bb631706676310e5cd7033cb5325f8d"} Dec 05 12:29:21 crc kubenswrapper[4728]: I1205 12:29:21.793084 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-w2v47" podStartSLOduration=9.689649086 podStartE2EDuration="20.79306327s" podCreationTimestamp="2025-12-05 12:29:01 +0000 UTC" firstStartedPulling="2025-12-05 12:29:08.627368199 +0000 UTC m=+4882.769490892" lastFinishedPulling="2025-12-05 12:29:19.730782383 +0000 UTC m=+4893.872905076" observedRunningTime="2025-12-05 12:29:21.773339083 +0000 UTC m=+4895.915461776" watchObservedRunningTime="2025-12-05 12:29:21.79306327 +0000 UTC m=+4895.935185973" Dec 05 12:29:22 crc kubenswrapper[4728]: I1205 12:29:22.364036 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-w2v47" Dec 05 12:29:22 crc kubenswrapper[4728]: I1205 12:29:22.364693 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-w2v47" Dec 05 12:29:23 crc kubenswrapper[4728]: I1205 12:29:23.426226 4728 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-w2v47" podUID="fa5787ff-69c1-44da-a0f1-dd0f51635a40" containerName="registry-server" probeResult="failure" output=< Dec 05 12:29:23 crc kubenswrapper[4728]: timeout: failed to connect service ":50051" within 1s Dec 05 12:29:23 crc kubenswrapper[4728]: > Dec 05 12:29:32 crc kubenswrapper[4728]: I1205 12:29:32.420329 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-w2v47" Dec 05 12:29:32 crc kubenswrapper[4728]: I1205 12:29:32.473656 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-w2v47" Dec 05 12:29:33 crc kubenswrapper[4728]: I1205 12:29:33.221884 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-w2v47"] Dec 05 12:29:33 crc kubenswrapper[4728]: I1205 12:29:33.883357 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-w2v47" podUID="fa5787ff-69c1-44da-a0f1-dd0f51635a40" containerName="registry-server" containerID="cri-o://45ba97b00730863bfb2891d42ee2add72bb631706676310e5cd7033cb5325f8d" gracePeriod=2 Dec 05 12:29:34 crc kubenswrapper[4728]: I1205 12:29:34.419475 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-w2v47" Dec 05 12:29:34 crc kubenswrapper[4728]: I1205 12:29:34.545132 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa5787ff-69c1-44da-a0f1-dd0f51635a40-utilities\") pod \"fa5787ff-69c1-44da-a0f1-dd0f51635a40\" (UID: \"fa5787ff-69c1-44da-a0f1-dd0f51635a40\") " Dec 05 12:29:34 crc kubenswrapper[4728]: I1205 12:29:34.545324 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cf6z5\" (UniqueName: \"kubernetes.io/projected/fa5787ff-69c1-44da-a0f1-dd0f51635a40-kube-api-access-cf6z5\") pod \"fa5787ff-69c1-44da-a0f1-dd0f51635a40\" (UID: \"fa5787ff-69c1-44da-a0f1-dd0f51635a40\") " Dec 05 12:29:34 crc kubenswrapper[4728]: I1205 12:29:34.545399 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa5787ff-69c1-44da-a0f1-dd0f51635a40-catalog-content\") pod \"fa5787ff-69c1-44da-a0f1-dd0f51635a40\" (UID: \"fa5787ff-69c1-44da-a0f1-dd0f51635a40\") " Dec 05 12:29:34 crc kubenswrapper[4728]: I1205 12:29:34.546319 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fa5787ff-69c1-44da-a0f1-dd0f51635a40-utilities" (OuterVolumeSpecName: "utilities") pod "fa5787ff-69c1-44da-a0f1-dd0f51635a40" (UID: "fa5787ff-69c1-44da-a0f1-dd0f51635a40"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:29:34 crc kubenswrapper[4728]: I1205 12:29:34.546575 4728 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa5787ff-69c1-44da-a0f1-dd0f51635a40-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:34 crc kubenswrapper[4728]: I1205 12:29:34.566628 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa5787ff-69c1-44da-a0f1-dd0f51635a40-kube-api-access-cf6z5" (OuterVolumeSpecName: "kube-api-access-cf6z5") pod "fa5787ff-69c1-44da-a0f1-dd0f51635a40" (UID: "fa5787ff-69c1-44da-a0f1-dd0f51635a40"). InnerVolumeSpecName "kube-api-access-cf6z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:29:34 crc kubenswrapper[4728]: I1205 12:29:34.648687 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cf6z5\" (UniqueName: \"kubernetes.io/projected/fa5787ff-69c1-44da-a0f1-dd0f51635a40-kube-api-access-cf6z5\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:34 crc kubenswrapper[4728]: I1205 12:29:34.670233 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fa5787ff-69c1-44da-a0f1-dd0f51635a40-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fa5787ff-69c1-44da-a0f1-dd0f51635a40" (UID: "fa5787ff-69c1-44da-a0f1-dd0f51635a40"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:29:34 crc kubenswrapper[4728]: I1205 12:29:34.751483 4728 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa5787ff-69c1-44da-a0f1-dd0f51635a40-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:34 crc kubenswrapper[4728]: I1205 12:29:34.895249 4728 generic.go:334] "Generic (PLEG): container finished" podID="fa5787ff-69c1-44da-a0f1-dd0f51635a40" containerID="45ba97b00730863bfb2891d42ee2add72bb631706676310e5cd7033cb5325f8d" exitCode=0 Dec 05 12:29:34 crc kubenswrapper[4728]: I1205 12:29:34.895346 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w2v47" event={"ID":"fa5787ff-69c1-44da-a0f1-dd0f51635a40","Type":"ContainerDied","Data":"45ba97b00730863bfb2891d42ee2add72bb631706676310e5cd7033cb5325f8d"} Dec 05 12:29:34 crc kubenswrapper[4728]: I1205 12:29:34.895610 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w2v47" event={"ID":"fa5787ff-69c1-44da-a0f1-dd0f51635a40","Type":"ContainerDied","Data":"adf0dce061c39d741a53f1ca416f7b9003f92b71ae3400999fd6f484867e2638"} Dec 05 12:29:34 crc kubenswrapper[4728]: I1205 12:29:34.895632 4728 scope.go:117] "RemoveContainer" containerID="45ba97b00730863bfb2891d42ee2add72bb631706676310e5cd7033cb5325f8d" Dec 05 12:29:34 crc kubenswrapper[4728]: I1205 12:29:34.895368 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-w2v47" Dec 05 12:29:34 crc kubenswrapper[4728]: I1205 12:29:34.916233 4728 scope.go:117] "RemoveContainer" containerID="b70decaea2596397e0f282b8bf04728201132d18639bf42b1f3ac5f7d2a3f701" Dec 05 12:29:34 crc kubenswrapper[4728]: I1205 12:29:34.933617 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-w2v47"] Dec 05 12:29:34 crc kubenswrapper[4728]: I1205 12:29:34.945195 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-w2v47"] Dec 05 12:29:34 crc kubenswrapper[4728]: I1205 12:29:34.957218 4728 scope.go:117] "RemoveContainer" containerID="636edf6a5653043082a0358c435845719760772838c1abaf03a3e7a976230223" Dec 05 12:29:34 crc kubenswrapper[4728]: I1205 12:29:34.984910 4728 scope.go:117] "RemoveContainer" containerID="45ba97b00730863bfb2891d42ee2add72bb631706676310e5cd7033cb5325f8d" Dec 05 12:29:34 crc kubenswrapper[4728]: E1205 12:29:34.985357 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"45ba97b00730863bfb2891d42ee2add72bb631706676310e5cd7033cb5325f8d\": container with ID starting with 45ba97b00730863bfb2891d42ee2add72bb631706676310e5cd7033cb5325f8d not found: ID does not exist" containerID="45ba97b00730863bfb2891d42ee2add72bb631706676310e5cd7033cb5325f8d" Dec 05 12:29:34 crc kubenswrapper[4728]: I1205 12:29:34.985449 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"45ba97b00730863bfb2891d42ee2add72bb631706676310e5cd7033cb5325f8d"} err="failed to get container status \"45ba97b00730863bfb2891d42ee2add72bb631706676310e5cd7033cb5325f8d\": rpc error: code = NotFound desc = could not find container \"45ba97b00730863bfb2891d42ee2add72bb631706676310e5cd7033cb5325f8d\": container with ID starting with 45ba97b00730863bfb2891d42ee2add72bb631706676310e5cd7033cb5325f8d not found: ID does not exist" Dec 05 12:29:34 crc 
kubenswrapper[4728]: I1205 12:29:34.985528 4728 scope.go:117] "RemoveContainer" containerID="b70decaea2596397e0f282b8bf04728201132d18639bf42b1f3ac5f7d2a3f701" Dec 05 12:29:34 crc kubenswrapper[4728]: E1205 12:29:34.986125 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b70decaea2596397e0f282b8bf04728201132d18639bf42b1f3ac5f7d2a3f701\": container with ID starting with b70decaea2596397e0f282b8bf04728201132d18639bf42b1f3ac5f7d2a3f701 not found: ID does not exist" containerID="b70decaea2596397e0f282b8bf04728201132d18639bf42b1f3ac5f7d2a3f701" Dec 05 12:29:34 crc kubenswrapper[4728]: I1205 12:29:34.986156 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b70decaea2596397e0f282b8bf04728201132d18639bf42b1f3ac5f7d2a3f701"} err="failed to get container status \"b70decaea2596397e0f282b8bf04728201132d18639bf42b1f3ac5f7d2a3f701\": rpc error: code = NotFound desc = could not find container \"b70decaea2596397e0f282b8bf04728201132d18639bf42b1f3ac5f7d2a3f701\": container with ID starting with b70decaea2596397e0f282b8bf04728201132d18639bf42b1f3ac5f7d2a3f701 not found: ID does not exist" Dec 05 12:29:34 crc kubenswrapper[4728]: I1205 12:29:34.986177 4728 scope.go:117] "RemoveContainer" containerID="636edf6a5653043082a0358c435845719760772838c1abaf03a3e7a976230223" Dec 05 12:29:34 crc kubenswrapper[4728]: E1205 12:29:34.986398 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"636edf6a5653043082a0358c435845719760772838c1abaf03a3e7a976230223\": container with ID starting with 636edf6a5653043082a0358c435845719760772838c1abaf03a3e7a976230223 not found: ID does not exist" containerID="636edf6a5653043082a0358c435845719760772838c1abaf03a3e7a976230223" Dec 05 12:29:34 crc kubenswrapper[4728]: I1205 12:29:34.986491 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"636edf6a5653043082a0358c435845719760772838c1abaf03a3e7a976230223"} err="failed to get container status \"636edf6a5653043082a0358c435845719760772838c1abaf03a3e7a976230223\": rpc error: code = NotFound desc = could not find container \"636edf6a5653043082a0358c435845719760772838c1abaf03a3e7a976230223\": container with ID starting with 636edf6a5653043082a0358c435845719760772838c1abaf03a3e7a976230223 not found: ID does not exist" Dec 05 12:29:36 crc kubenswrapper[4728]: I1205 12:29:36.370165 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fa5787ff-69c1-44da-a0f1-dd0f51635a40" path="/var/lib/kubelet/pods/fa5787ff-69c1-44da-a0f1-dd0f51635a40/volumes" Dec 05 12:29:58 crc kubenswrapper[4728]: I1205 12:29:58.148478 4728 generic.go:334] "Generic (PLEG): container finished" podID="10ba31d4-b71d-49ff-a687-683ee165449e" containerID="0567d6c7950c6478d5e5593e27ebd874eeb2092f51f55ccf2847d2157b6ed4b6" exitCode=0 Dec 05 12:29:58 crc kubenswrapper[4728]: I1205 12:29:58.148716 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-m2zwd/crc-debug-xjqwv" event={"ID":"10ba31d4-b71d-49ff-a687-683ee165449e","Type":"ContainerDied","Data":"0567d6c7950c6478d5e5593e27ebd874eeb2092f51f55ccf2847d2157b6ed4b6"} Dec 05 12:29:59 crc kubenswrapper[4728]: I1205 12:29:59.282252 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-m2zwd/crc-debug-xjqwv" Dec 05 12:29:59 crc kubenswrapper[4728]: I1205 12:29:59.312240 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-m2zwd/crc-debug-xjqwv"] Dec 05 12:29:59 crc kubenswrapper[4728]: I1205 12:29:59.320214 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-m2zwd/crc-debug-xjqwv"] Dec 05 12:29:59 crc kubenswrapper[4728]: I1205 12:29:59.361105 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fjjxl\" (UniqueName: \"kubernetes.io/projected/10ba31d4-b71d-49ff-a687-683ee165449e-kube-api-access-fjjxl\") pod \"10ba31d4-b71d-49ff-a687-683ee165449e\" (UID: \"10ba31d4-b71d-49ff-a687-683ee165449e\") " Dec 05 12:29:59 crc kubenswrapper[4728]: I1205 12:29:59.361291 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/10ba31d4-b71d-49ff-a687-683ee165449e-host\") pod \"10ba31d4-b71d-49ff-a687-683ee165449e\" (UID: \"10ba31d4-b71d-49ff-a687-683ee165449e\") " Dec 05 12:29:59 crc kubenswrapper[4728]: I1205 12:29:59.361403 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/10ba31d4-b71d-49ff-a687-683ee165449e-host" (OuterVolumeSpecName: "host") pod "10ba31d4-b71d-49ff-a687-683ee165449e" (UID: "10ba31d4-b71d-49ff-a687-683ee165449e"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:29:59 crc kubenswrapper[4728]: I1205 12:29:59.362094 4728 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/10ba31d4-b71d-49ff-a687-683ee165449e-host\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:59 crc kubenswrapper[4728]: I1205 12:29:59.367413 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/10ba31d4-b71d-49ff-a687-683ee165449e-kube-api-access-fjjxl" (OuterVolumeSpecName: "kube-api-access-fjjxl") pod "10ba31d4-b71d-49ff-a687-683ee165449e" (UID: "10ba31d4-b71d-49ff-a687-683ee165449e"). InnerVolumeSpecName "kube-api-access-fjjxl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:29:59 crc kubenswrapper[4728]: I1205 12:29:59.464173 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fjjxl\" (UniqueName: \"kubernetes.io/projected/10ba31d4-b71d-49ff-a687-683ee165449e-kube-api-access-fjjxl\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:00 crc kubenswrapper[4728]: I1205 12:30:00.170589 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="85133aa8f0884e491a96ef48f30ee3aa2e15c1f4a19cac8132976893a7afcf2c" Dec 05 12:30:00 crc kubenswrapper[4728]: I1205 12:30:00.170644 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-m2zwd/crc-debug-xjqwv" Dec 05 12:30:00 crc kubenswrapper[4728]: I1205 12:30:00.187992 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415630-cr4t8"] Dec 05 12:30:00 crc kubenswrapper[4728]: E1205 12:30:00.188615 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa5787ff-69c1-44da-a0f1-dd0f51635a40" containerName="extract-utilities" Dec 05 12:30:00 crc kubenswrapper[4728]: I1205 12:30:00.188757 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa5787ff-69c1-44da-a0f1-dd0f51635a40" containerName="extract-utilities" Dec 05 12:30:00 crc kubenswrapper[4728]: E1205 12:30:00.188884 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa5787ff-69c1-44da-a0f1-dd0f51635a40" containerName="registry-server" Dec 05 12:30:00 crc kubenswrapper[4728]: I1205 12:30:00.188964 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa5787ff-69c1-44da-a0f1-dd0f51635a40" containerName="registry-server" Dec 05 12:30:00 crc kubenswrapper[4728]: E1205 12:30:00.189085 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa5787ff-69c1-44da-a0f1-dd0f51635a40" containerName="extract-content" Dec 05 12:30:00 crc kubenswrapper[4728]: I1205 12:30:00.189196 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa5787ff-69c1-44da-a0f1-dd0f51635a40" containerName="extract-content" Dec 05 12:30:00 crc kubenswrapper[4728]: E1205 12:30:00.189286 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10ba31d4-b71d-49ff-a687-683ee165449e" containerName="container-00" Dec 05 12:30:00 crc kubenswrapper[4728]: I1205 12:30:00.189363 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="10ba31d4-b71d-49ff-a687-683ee165449e" containerName="container-00" Dec 05 12:30:00 crc kubenswrapper[4728]: I1205 12:30:00.189729 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="10ba31d4-b71d-49ff-a687-683ee165449e" containerName="container-00" Dec 05 12:30:00 crc kubenswrapper[4728]: I1205 12:30:00.189886 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa5787ff-69c1-44da-a0f1-dd0f51635a40" containerName="registry-server" Dec 05 12:30:00 crc kubenswrapper[4728]: I1205 12:30:00.190761 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415630-cr4t8" Dec 05 12:30:00 crc kubenswrapper[4728]: I1205 12:30:00.193472 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 12:30:00 crc kubenswrapper[4728]: I1205 12:30:00.193495 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 12:30:00 crc kubenswrapper[4728]: I1205 12:30:00.200391 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415630-cr4t8"] Dec 05 12:30:00 crc kubenswrapper[4728]: I1205 12:30:00.365851 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="10ba31d4-b71d-49ff-a687-683ee165449e" path="/var/lib/kubelet/pods/10ba31d4-b71d-49ff-a687-683ee165449e/volumes" Dec 05 12:30:00 crc kubenswrapper[4728]: I1205 12:30:00.380574 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9c303fdf-e4e9-4ef0-ad97-ea1d664e57b3-secret-volume\") pod \"collect-profiles-29415630-cr4t8\" (UID: \"9c303fdf-e4e9-4ef0-ad97-ea1d664e57b3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415630-cr4t8" Dec 05 12:30:00 crc kubenswrapper[4728]: I1205 12:30:00.380844 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9c303fdf-e4e9-4ef0-ad97-ea1d664e57b3-config-volume\") pod \"collect-profiles-29415630-cr4t8\" (UID: \"9c303fdf-e4e9-4ef0-ad97-ea1d664e57b3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415630-cr4t8" Dec 05 12:30:00 crc kubenswrapper[4728]: I1205 12:30:00.380888 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zqdm7\" (UniqueName: \"kubernetes.io/projected/9c303fdf-e4e9-4ef0-ad97-ea1d664e57b3-kube-api-access-zqdm7\") pod \"collect-profiles-29415630-cr4t8\" (UID: \"9c303fdf-e4e9-4ef0-ad97-ea1d664e57b3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415630-cr4t8" Dec 05 12:30:00 crc kubenswrapper[4728]: I1205 12:30:00.482358 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9c303fdf-e4e9-4ef0-ad97-ea1d664e57b3-secret-volume\") pod \"collect-profiles-29415630-cr4t8\" (UID: \"9c303fdf-e4e9-4ef0-ad97-ea1d664e57b3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415630-cr4t8" Dec 05 12:30:00 crc kubenswrapper[4728]: I1205 12:30:00.482508 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9c303fdf-e4e9-4ef0-ad97-ea1d664e57b3-config-volume\") pod \"collect-profiles-29415630-cr4t8\" (UID: \"9c303fdf-e4e9-4ef0-ad97-ea1d664e57b3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415630-cr4t8" Dec 05 12:30:00 crc kubenswrapper[4728]: I1205 12:30:00.483545 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zqdm7\" (UniqueName: \"kubernetes.io/projected/9c303fdf-e4e9-4ef0-ad97-ea1d664e57b3-kube-api-access-zqdm7\") pod \"collect-profiles-29415630-cr4t8\" (UID: \"9c303fdf-e4e9-4ef0-ad97-ea1d664e57b3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415630-cr4t8" Dec 
05 12:30:00 crc kubenswrapper[4728]: I1205 12:30:00.483465 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9c303fdf-e4e9-4ef0-ad97-ea1d664e57b3-config-volume\") pod \"collect-profiles-29415630-cr4t8\" (UID: \"9c303fdf-e4e9-4ef0-ad97-ea1d664e57b3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415630-cr4t8" Dec 05 12:30:00 crc kubenswrapper[4728]: I1205 12:30:00.486898 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9c303fdf-e4e9-4ef0-ad97-ea1d664e57b3-secret-volume\") pod \"collect-profiles-29415630-cr4t8\" (UID: \"9c303fdf-e4e9-4ef0-ad97-ea1d664e57b3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415630-cr4t8" Dec 05 12:30:00 crc kubenswrapper[4728]: I1205 12:30:00.500155 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-m2zwd/crc-debug-qzdps"] Dec 05 12:30:00 crc kubenswrapper[4728]: I1205 12:30:00.501393 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-m2zwd/crc-debug-qzdps" Dec 05 12:30:00 crc kubenswrapper[4728]: I1205 12:30:00.505896 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zqdm7\" (UniqueName: \"kubernetes.io/projected/9c303fdf-e4e9-4ef0-ad97-ea1d664e57b3-kube-api-access-zqdm7\") pod \"collect-profiles-29415630-cr4t8\" (UID: \"9c303fdf-e4e9-4ef0-ad97-ea1d664e57b3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415630-cr4t8" Dec 05 12:30:00 crc kubenswrapper[4728]: I1205 12:30:00.518157 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415630-cr4t8" Dec 05 12:30:00 crc kubenswrapper[4728]: I1205 12:30:00.584805 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l2lm7\" (UniqueName: \"kubernetes.io/projected/ed7f0739-003d-4287-9695-8856aec5782b-kube-api-access-l2lm7\") pod \"crc-debug-qzdps\" (UID: \"ed7f0739-003d-4287-9695-8856aec5782b\") " pod="openshift-must-gather-m2zwd/crc-debug-qzdps" Dec 05 12:30:00 crc kubenswrapper[4728]: I1205 12:30:00.585016 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ed7f0739-003d-4287-9695-8856aec5782b-host\") pod \"crc-debug-qzdps\" (UID: \"ed7f0739-003d-4287-9695-8856aec5782b\") " pod="openshift-must-gather-m2zwd/crc-debug-qzdps" Dec 05 12:30:00 crc kubenswrapper[4728]: I1205 12:30:00.687393 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l2lm7\" (UniqueName: \"kubernetes.io/projected/ed7f0739-003d-4287-9695-8856aec5782b-kube-api-access-l2lm7\") pod \"crc-debug-qzdps\" (UID: \"ed7f0739-003d-4287-9695-8856aec5782b\") " pod="openshift-must-gather-m2zwd/crc-debug-qzdps" Dec 05 12:30:00 crc kubenswrapper[4728]: I1205 12:30:00.687914 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ed7f0739-003d-4287-9695-8856aec5782b-host\") pod \"crc-debug-qzdps\" (UID: \"ed7f0739-003d-4287-9695-8856aec5782b\") " pod="openshift-must-gather-m2zwd/crc-debug-qzdps" Dec 05 12:30:00 crc kubenswrapper[4728]: I1205 12:30:00.688008 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: 
\"kubernetes.io/host-path/ed7f0739-003d-4287-9695-8856aec5782b-host\") pod \"crc-debug-qzdps\" (UID: \"ed7f0739-003d-4287-9695-8856aec5782b\") " pod="openshift-must-gather-m2zwd/crc-debug-qzdps" Dec 05 12:30:00 crc kubenswrapper[4728]: I1205 12:30:00.710589 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l2lm7\" (UniqueName: \"kubernetes.io/projected/ed7f0739-003d-4287-9695-8856aec5782b-kube-api-access-l2lm7\") pod \"crc-debug-qzdps\" (UID: \"ed7f0739-003d-4287-9695-8856aec5782b\") " pod="openshift-must-gather-m2zwd/crc-debug-qzdps" Dec 05 12:30:00 crc kubenswrapper[4728]: I1205 12:30:00.965498 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-m2zwd/crc-debug-qzdps" Dec 05 12:30:01 crc kubenswrapper[4728]: I1205 12:30:01.034001 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415630-cr4t8"] Dec 05 12:30:01 crc kubenswrapper[4728]: W1205 12:30:01.041408 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9c303fdf_e4e9_4ef0_ad97_ea1d664e57b3.slice/crio-354a767781629990e4638556c2c3cb36f468b78aa666247f1595796edc0778e6 WatchSource:0}: Error finding container 354a767781629990e4638556c2c3cb36f468b78aa666247f1595796edc0778e6: Status 404 returned error can't find the container with id 354a767781629990e4638556c2c3cb36f468b78aa666247f1595796edc0778e6 Dec 05 12:30:01 crc kubenswrapper[4728]: I1205 12:30:01.184814 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415630-cr4t8" event={"ID":"9c303fdf-e4e9-4ef0-ad97-ea1d664e57b3","Type":"ContainerStarted","Data":"354a767781629990e4638556c2c3cb36f468b78aa666247f1595796edc0778e6"} Dec 05 12:30:01 crc kubenswrapper[4728]: I1205 12:30:01.188174 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-m2zwd/crc-debug-qzdps" event={"ID":"ed7f0739-003d-4287-9695-8856aec5782b","Type":"ContainerStarted","Data":"4e6f728e0c98ebf98d64fe5193e5e3aeb9b2ccdfcaeebf9d5530a7c37c144122"} Dec 05 12:30:02 crc kubenswrapper[4728]: I1205 12:30:02.197057 4728 generic.go:334] "Generic (PLEG): container finished" podID="9c303fdf-e4e9-4ef0-ad97-ea1d664e57b3" containerID="70450c8067323d13e1b175ce1e2a2fa4f0618284a64965c3e7ebbb9d24793d84" exitCode=0 Dec 05 12:30:02 crc kubenswrapper[4728]: I1205 12:30:02.197153 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415630-cr4t8" event={"ID":"9c303fdf-e4e9-4ef0-ad97-ea1d664e57b3","Type":"ContainerDied","Data":"70450c8067323d13e1b175ce1e2a2fa4f0618284a64965c3e7ebbb9d24793d84"} Dec 05 12:30:02 crc kubenswrapper[4728]: I1205 12:30:02.199308 4728 generic.go:334] "Generic (PLEG): container finished" podID="ed7f0739-003d-4287-9695-8856aec5782b" containerID="f3cbb1b6c6ca6afeb7a67f350fbe0ced16e7a71252b75a21a6e8ea9854543346" exitCode=0 Dec 05 12:30:02 crc kubenswrapper[4728]: I1205 12:30:02.199350 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-m2zwd/crc-debug-qzdps" event={"ID":"ed7f0739-003d-4287-9695-8856aec5782b","Type":"ContainerDied","Data":"f3cbb1b6c6ca6afeb7a67f350fbe0ced16e7a71252b75a21a6e8ea9854543346"} Dec 05 12:30:03 crc kubenswrapper[4728]: I1205 12:30:03.330837 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-m2zwd/crc-debug-qzdps" Dec 05 12:30:03 crc kubenswrapper[4728]: I1205 12:30:03.456901 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ed7f0739-003d-4287-9695-8856aec5782b-host\") pod \"ed7f0739-003d-4287-9695-8856aec5782b\" (UID: \"ed7f0739-003d-4287-9695-8856aec5782b\") " Dec 05 12:30:03 crc kubenswrapper[4728]: I1205 12:30:03.457217 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l2lm7\" (UniqueName: \"kubernetes.io/projected/ed7f0739-003d-4287-9695-8856aec5782b-kube-api-access-l2lm7\") pod \"ed7f0739-003d-4287-9695-8856aec5782b\" (UID: \"ed7f0739-003d-4287-9695-8856aec5782b\") " Dec 05 12:30:03 crc kubenswrapper[4728]: I1205 12:30:03.457342 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ed7f0739-003d-4287-9695-8856aec5782b-host" (OuterVolumeSpecName: "host") pod "ed7f0739-003d-4287-9695-8856aec5782b" (UID: "ed7f0739-003d-4287-9695-8856aec5782b"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:30:03 crc kubenswrapper[4728]: I1205 12:30:03.457890 4728 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ed7f0739-003d-4287-9695-8856aec5782b-host\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:03 crc kubenswrapper[4728]: I1205 12:30:03.466309 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed7f0739-003d-4287-9695-8856aec5782b-kube-api-access-l2lm7" (OuterVolumeSpecName: "kube-api-access-l2lm7") pod "ed7f0739-003d-4287-9695-8856aec5782b" (UID: "ed7f0739-003d-4287-9695-8856aec5782b"). InnerVolumeSpecName "kube-api-access-l2lm7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:30:03 crc kubenswrapper[4728]: I1205 12:30:03.557836 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415630-cr4t8" Dec 05 12:30:03 crc kubenswrapper[4728]: I1205 12:30:03.565570 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l2lm7\" (UniqueName: \"kubernetes.io/projected/ed7f0739-003d-4287-9695-8856aec5782b-kube-api-access-l2lm7\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:03 crc kubenswrapper[4728]: I1205 12:30:03.666358 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zqdm7\" (UniqueName: \"kubernetes.io/projected/9c303fdf-e4e9-4ef0-ad97-ea1d664e57b3-kube-api-access-zqdm7\") pod \"9c303fdf-e4e9-4ef0-ad97-ea1d664e57b3\" (UID: \"9c303fdf-e4e9-4ef0-ad97-ea1d664e57b3\") " Dec 05 12:30:03 crc kubenswrapper[4728]: I1205 12:30:03.666523 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9c303fdf-e4e9-4ef0-ad97-ea1d664e57b3-secret-volume\") pod \"9c303fdf-e4e9-4ef0-ad97-ea1d664e57b3\" (UID: \"9c303fdf-e4e9-4ef0-ad97-ea1d664e57b3\") " Dec 05 12:30:03 crc kubenswrapper[4728]: I1205 12:30:03.666549 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9c303fdf-e4e9-4ef0-ad97-ea1d664e57b3-config-volume\") pod \"9c303fdf-e4e9-4ef0-ad97-ea1d664e57b3\" (UID: \"9c303fdf-e4e9-4ef0-ad97-ea1d664e57b3\") " Dec 05 12:30:03 crc kubenswrapper[4728]: I1205 12:30:03.667385 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9c303fdf-e4e9-4ef0-ad97-ea1d664e57b3-config-volume" (OuterVolumeSpecName: "config-volume") pod "9c303fdf-e4e9-4ef0-ad97-ea1d664e57b3" (UID: "9c303fdf-e4e9-4ef0-ad97-ea1d664e57b3"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:30:03 crc kubenswrapper[4728]: I1205 12:30:03.670618 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c303fdf-e4e9-4ef0-ad97-ea1d664e57b3-kube-api-access-zqdm7" (OuterVolumeSpecName: "kube-api-access-zqdm7") pod "9c303fdf-e4e9-4ef0-ad97-ea1d664e57b3" (UID: "9c303fdf-e4e9-4ef0-ad97-ea1d664e57b3"). InnerVolumeSpecName "kube-api-access-zqdm7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:30:03 crc kubenswrapper[4728]: I1205 12:30:03.673468 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c303fdf-e4e9-4ef0-ad97-ea1d664e57b3-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "9c303fdf-e4e9-4ef0-ad97-ea1d664e57b3" (UID: "9c303fdf-e4e9-4ef0-ad97-ea1d664e57b3"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:30:03 crc kubenswrapper[4728]: I1205 12:30:03.768340 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zqdm7\" (UniqueName: \"kubernetes.io/projected/9c303fdf-e4e9-4ef0-ad97-ea1d664e57b3-kube-api-access-zqdm7\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:03 crc kubenswrapper[4728]: I1205 12:30:03.768545 4728 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9c303fdf-e4e9-4ef0-ad97-ea1d664e57b3-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:03 crc kubenswrapper[4728]: I1205 12:30:03.768600 4728 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9c303fdf-e4e9-4ef0-ad97-ea1d664e57b3-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:04 crc kubenswrapper[4728]: I1205 12:30:04.216726 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415630-cr4t8" event={"ID":"9c303fdf-e4e9-4ef0-ad97-ea1d664e57b3","Type":"ContainerDied","Data":"354a767781629990e4638556c2c3cb36f468b78aa666247f1595796edc0778e6"} Dec 05 12:30:04 crc kubenswrapper[4728]: I1205 12:30:04.216763 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="354a767781629990e4638556c2c3cb36f468b78aa666247f1595796edc0778e6" Dec 05 12:30:04 crc kubenswrapper[4728]: I1205 12:30:04.216844 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415630-cr4t8" Dec 05 12:30:04 crc kubenswrapper[4728]: I1205 12:30:04.219404 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-m2zwd/crc-debug-qzdps" event={"ID":"ed7f0739-003d-4287-9695-8856aec5782b","Type":"ContainerDied","Data":"4e6f728e0c98ebf98d64fe5193e5e3aeb9b2ccdfcaeebf9d5530a7c37c144122"} Dec 05 12:30:04 crc kubenswrapper[4728]: I1205 12:30:04.219444 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4e6f728e0c98ebf98d64fe5193e5e3aeb9b2ccdfcaeebf9d5530a7c37c144122" Dec 05 12:30:04 crc kubenswrapper[4728]: I1205 12:30:04.219504 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-m2zwd/crc-debug-qzdps" Dec 05 12:30:04 crc kubenswrapper[4728]: E1205 12:30:04.305291 4728 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9c303fdf_e4e9_4ef0_ad97_ea1d664e57b3.slice\": RecentStats: unable to find data in memory cache]" Dec 05 12:30:04 crc kubenswrapper[4728]: I1205 12:30:04.627873 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415585-rplrh"] Dec 05 12:30:04 crc kubenswrapper[4728]: I1205 12:30:04.636530 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415585-rplrh"] Dec 05 12:30:05 crc kubenswrapper[4728]: I1205 12:30:05.192330 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-m2zwd/crc-debug-qzdps"] Dec 05 12:30:05 crc kubenswrapper[4728]: I1205 12:30:05.200655 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-m2zwd/crc-debug-qzdps"] Dec 05 12:30:06 crc kubenswrapper[4728]: I1205 12:30:06.369717 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="91819e1a-92e1-4893-b375-90264108905d" path="/var/lib/kubelet/pods/91819e1a-92e1-4893-b375-90264108905d/volumes" Dec 05 12:30:06 crc kubenswrapper[4728]: I1205 12:30:06.370776 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed7f0739-003d-4287-9695-8856aec5782b" path="/var/lib/kubelet/pods/ed7f0739-003d-4287-9695-8856aec5782b/volumes" Dec 05 12:30:06 crc kubenswrapper[4728]: I1205 12:30:06.379136 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-m2zwd/crc-debug-hfb44"] Dec 05 12:30:06 crc kubenswrapper[4728]: E1205 12:30:06.379593 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c303fdf-e4e9-4ef0-ad97-ea1d664e57b3" containerName="collect-profiles" Dec 05 12:30:06 crc kubenswrapper[4728]: I1205 12:30:06.379615 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c303fdf-e4e9-4ef0-ad97-ea1d664e57b3" containerName="collect-profiles" Dec 05 12:30:06 crc kubenswrapper[4728]: E1205 12:30:06.379641 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed7f0739-003d-4287-9695-8856aec5782b" containerName="container-00" Dec 05 12:30:06 crc kubenswrapper[4728]: I1205 12:30:06.379650 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed7f0739-003d-4287-9695-8856aec5782b" containerName="container-00" Dec 05 12:30:06 crc kubenswrapper[4728]: I1205 12:30:06.380070 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c303fdf-e4e9-4ef0-ad97-ea1d664e57b3" containerName="collect-profiles" Dec 05 12:30:06 crc kubenswrapper[4728]: I1205 12:30:06.380114 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed7f0739-003d-4287-9695-8856aec5782b" containerName="container-00" Dec 05 12:30:06 crc kubenswrapper[4728]: I1205 12:30:06.380930 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-m2zwd/crc-debug-hfb44" Dec 05 12:30:06 crc kubenswrapper[4728]: I1205 12:30:06.516371 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8t5jd\" (UniqueName: \"kubernetes.io/projected/d7cd338a-317e-4b37-a602-3b8bf564dcb8-kube-api-access-8t5jd\") pod \"crc-debug-hfb44\" (UID: \"d7cd338a-317e-4b37-a602-3b8bf564dcb8\") " pod="openshift-must-gather-m2zwd/crc-debug-hfb44" Dec 05 12:30:06 crc kubenswrapper[4728]: I1205 12:30:06.517212 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d7cd338a-317e-4b37-a602-3b8bf564dcb8-host\") pod \"crc-debug-hfb44\" (UID: \"d7cd338a-317e-4b37-a602-3b8bf564dcb8\") " pod="openshift-must-gather-m2zwd/crc-debug-hfb44" Dec 05 12:30:06 crc kubenswrapper[4728]: I1205 12:30:06.619605 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d7cd338a-317e-4b37-a602-3b8bf564dcb8-host\") pod \"crc-debug-hfb44\" (UID: \"d7cd338a-317e-4b37-a602-3b8bf564dcb8\") " pod="openshift-must-gather-m2zwd/crc-debug-hfb44" Dec 05 12:30:06 crc kubenswrapper[4728]: I1205 12:30:06.619719 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8t5jd\" (UniqueName: \"kubernetes.io/projected/d7cd338a-317e-4b37-a602-3b8bf564dcb8-kube-api-access-8t5jd\") pod \"crc-debug-hfb44\" (UID: \"d7cd338a-317e-4b37-a602-3b8bf564dcb8\") " pod="openshift-must-gather-m2zwd/crc-debug-hfb44" Dec 05 12:30:06 crc kubenswrapper[4728]: I1205 12:30:06.619892 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d7cd338a-317e-4b37-a602-3b8bf564dcb8-host\") pod \"crc-debug-hfb44\" (UID: \"d7cd338a-317e-4b37-a602-3b8bf564dcb8\") " pod="openshift-must-gather-m2zwd/crc-debug-hfb44" Dec 05 12:30:06 crc kubenswrapper[4728]: I1205 12:30:06.895109 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8t5jd\" (UniqueName: \"kubernetes.io/projected/d7cd338a-317e-4b37-a602-3b8bf564dcb8-kube-api-access-8t5jd\") pod \"crc-debug-hfb44\" (UID: \"d7cd338a-317e-4b37-a602-3b8bf564dcb8\") " pod="openshift-must-gather-m2zwd/crc-debug-hfb44" Dec 05 12:30:07 crc kubenswrapper[4728]: I1205 12:30:07.013964 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-m2zwd/crc-debug-hfb44" Dec 05 12:30:07 crc kubenswrapper[4728]: I1205 12:30:07.247116 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-m2zwd/crc-debug-hfb44" event={"ID":"d7cd338a-317e-4b37-a602-3b8bf564dcb8","Type":"ContainerStarted","Data":"9e1e13ec2bb9f35d75de6ce2b54f90571955698359015c6755fee69b668347da"} Dec 05 12:30:08 crc kubenswrapper[4728]: I1205 12:30:08.259995 4728 generic.go:334] "Generic (PLEG): container finished" podID="d7cd338a-317e-4b37-a602-3b8bf564dcb8" containerID="7c58f4e4f2d67276827b816a19cdb6a7b1a06239cdc504d3d43760e08eaa54fc" exitCode=0 Dec 05 12:30:08 crc kubenswrapper[4728]: I1205 12:30:08.260110 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-m2zwd/crc-debug-hfb44" event={"ID":"d7cd338a-317e-4b37-a602-3b8bf564dcb8","Type":"ContainerDied","Data":"7c58f4e4f2d67276827b816a19cdb6a7b1a06239cdc504d3d43760e08eaa54fc"} Dec 05 12:30:08 crc kubenswrapper[4728]: I1205 12:30:08.299149 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-m2zwd/crc-debug-hfb44"] Dec 05 12:30:08 crc kubenswrapper[4728]: I1205 12:30:08.308771 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-m2zwd/crc-debug-hfb44"] Dec 05 12:30:09 crc kubenswrapper[4728]: I1205 12:30:09.376776 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-m2zwd/crc-debug-hfb44" Dec 05 12:30:09 crc kubenswrapper[4728]: I1205 12:30:09.482989 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8t5jd\" (UniqueName: \"kubernetes.io/projected/d7cd338a-317e-4b37-a602-3b8bf564dcb8-kube-api-access-8t5jd\") pod \"d7cd338a-317e-4b37-a602-3b8bf564dcb8\" (UID: \"d7cd338a-317e-4b37-a602-3b8bf564dcb8\") " Dec 05 12:30:09 crc kubenswrapper[4728]: I1205 12:30:09.483206 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d7cd338a-317e-4b37-a602-3b8bf564dcb8-host\") pod \"d7cd338a-317e-4b37-a602-3b8bf564dcb8\" (UID: \"d7cd338a-317e-4b37-a602-3b8bf564dcb8\") " Dec 05 12:30:09 crc kubenswrapper[4728]: I1205 12:30:09.483336 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d7cd338a-317e-4b37-a602-3b8bf564dcb8-host" (OuterVolumeSpecName: "host") pod "d7cd338a-317e-4b37-a602-3b8bf564dcb8" (UID: "d7cd338a-317e-4b37-a602-3b8bf564dcb8"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:30:09 crc kubenswrapper[4728]: I1205 12:30:09.483780 4728 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d7cd338a-317e-4b37-a602-3b8bf564dcb8-host\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:09 crc kubenswrapper[4728]: I1205 12:30:09.493028 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d7cd338a-317e-4b37-a602-3b8bf564dcb8-kube-api-access-8t5jd" (OuterVolumeSpecName: "kube-api-access-8t5jd") pod "d7cd338a-317e-4b37-a602-3b8bf564dcb8" (UID: "d7cd338a-317e-4b37-a602-3b8bf564dcb8"). InnerVolumeSpecName "kube-api-access-8t5jd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:30:09 crc kubenswrapper[4728]: I1205 12:30:09.586118 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8t5jd\" (UniqueName: \"kubernetes.io/projected/d7cd338a-317e-4b37-a602-3b8bf564dcb8-kube-api-access-8t5jd\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:10 crc kubenswrapper[4728]: I1205 12:30:10.280944 4728 scope.go:117] "RemoveContainer" containerID="7c58f4e4f2d67276827b816a19cdb6a7b1a06239cdc504d3d43760e08eaa54fc" Dec 05 12:30:10 crc kubenswrapper[4728]: I1205 12:30:10.281002 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-m2zwd/crc-debug-hfb44" Dec 05 12:30:10 crc kubenswrapper[4728]: I1205 12:30:10.362335 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d7cd338a-317e-4b37-a602-3b8bf564dcb8" path="/var/lib/kubelet/pods/d7cd338a-317e-4b37-a602-3b8bf564dcb8/volumes" Dec 05 12:30:25 crc kubenswrapper[4728]: I1205 12:30:25.384393 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-798767f9fd-kfrfz_24fcf86b-13a3-46c0-bea6-37ef4da29b48/barbican-api-log/0.log" Dec 05 12:30:25 crc kubenswrapper[4728]: I1205 12:30:25.389368 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-798767f9fd-kfrfz_24fcf86b-13a3-46c0-bea6-37ef4da29b48/barbican-api/0.log" Dec 05 12:30:25 crc kubenswrapper[4728]: I1205 12:30:25.562442 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-76974b5d9d-khzwj_37ac3cc7-ebdb-4ba9-97b6-c62b482b49c2/barbican-keystone-listener/0.log" Dec 05 12:30:25 crc kubenswrapper[4728]: I1205 12:30:25.658064 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-7f87fccb55-qhksr_2b11e743-92a0-4601-8cdf-935c3cc54a55/barbican-worker/0.log" Dec 05 12:30:25 crc kubenswrapper[4728]: I1205 12:30:25.835122 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-7f87fccb55-qhksr_2b11e743-92a0-4601-8cdf-935c3cc54a55/barbican-worker-log/0.log" Dec 05 12:30:25 crc kubenswrapper[4728]: I1205 12:30:25.943808 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-rjm82_cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 12:30:26 crc kubenswrapper[4728]: I1205 12:30:26.241013 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_a7d98205-ffa7-4388-8fff-66caf169466f/ceilometer-central-agent/0.log" Dec 05 12:30:26 crc kubenswrapper[4728]: I1205 12:30:26.271066 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_a7d98205-ffa7-4388-8fff-66caf169466f/ceilometer-notification-agent/0.log" Dec 05 12:30:26 crc kubenswrapper[4728]: I1205 12:30:26.380452 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_a7d98205-ffa7-4388-8fff-66caf169466f/proxy-httpd/0.log" Dec 05 12:30:26 crc kubenswrapper[4728]: I1205 12:30:26.451509 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_a7d98205-ffa7-4388-8fff-66caf169466f/sg-core/0.log" Dec 05 12:30:26 crc kubenswrapper[4728]: I1205 12:30:26.496231 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-76974b5d9d-khzwj_37ac3cc7-ebdb-4ba9-97b6-c62b482b49c2/barbican-keystone-listener-log/0.log" Dec 05 12:30:26 crc 
kubenswrapper[4728]: I1205 12:30:26.721274 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceph_ec650e34-b972-46a5-886c-ba25b07fca9c/ceph/0.log" Dec 05 12:30:27 crc kubenswrapper[4728]: I1205 12:30:27.316115 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_cfc23d81-9123-49a5-b770-4f0b60e01d35/cinder-api/0.log" Dec 05 12:30:27 crc kubenswrapper[4728]: I1205 12:30:27.438071 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_75b2a689-5a22-4496-af32-4e93e0b2f3df/probe/0.log" Dec 05 12:30:27 crc kubenswrapper[4728]: I1205 12:30:27.451628 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_cfc23d81-9123-49a5-b770-4f0b60e01d35/cinder-api-log/0.log" Dec 05 12:30:27 crc kubenswrapper[4728]: I1205 12:30:27.692942 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_61360ddf-d4ef-4328-add9-ac6c2d95d563/probe/0.log" Dec 05 12:30:27 crc kubenswrapper[4728]: I1205 12:30:27.741237 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_61360ddf-d4ef-4328-add9-ac6c2d95d563/cinder-scheduler/0.log" Dec 05 12:30:28 crc kubenswrapper[4728]: I1205 12:30:28.038600 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_23cf88b0-870f-44f4-9f15-aa4b15d86a12/probe/0.log" Dec 05 12:30:28 crc kubenswrapper[4728]: I1205 12:30:28.358535 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-24vs6_96cca126-d9b0-4c1c-93d8-63872e4a5e1c/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 12:30:28 crc kubenswrapper[4728]: I1205 12:30:28.630448 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-zsv4q_a4231d17-68db-4e1d-b39d-6d3affe3c6a5/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 12:30:28 crc kubenswrapper[4728]: I1205 12:30:28.785951 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-5d99fc9df9-2cd8j_d49621d5-c052-4869-ace8-926a18cc570d/init/0.log" Dec 05 12:30:28 crc kubenswrapper[4728]: I1205 12:30:28.827538 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_75b2a689-5a22-4496-af32-4e93e0b2f3df/cinder-backup/0.log" Dec 05 12:30:29 crc kubenswrapper[4728]: I1205 12:30:29.097268 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-rn9wc_2b484238-d80c-4274-b0b6-ea03a050e575/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 12:30:29 crc kubenswrapper[4728]: I1205 12:30:29.109217 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-5d99fc9df9-2cd8j_d49621d5-c052-4869-ace8-926a18cc570d/init/0.log" Dec 05 12:30:29 crc kubenswrapper[4728]: I1205 12:30:29.254202 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-5d99fc9df9-2cd8j_d49621d5-c052-4869-ace8-926a18cc570d/dnsmasq-dns/0.log" Dec 05 12:30:29 crc kubenswrapper[4728]: I1205 12:30:29.325952 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_32cf3773-bc1a-4c62-9b1a-8fc95e42e403/glance-log/0.log" Dec 05 12:30:29 crc kubenswrapper[4728]: I1205 12:30:29.375124 4728 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_glance-default-external-api-0_32cf3773-bc1a-4c62-9b1a-8fc95e42e403/glance-httpd/0.log" Dec 05 12:30:29 crc kubenswrapper[4728]: I1205 12:30:29.605572 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_f030a4aa-1b8c-4889-9385-56c75001c4f5/glance-httpd/0.log" Dec 05 12:30:29 crc kubenswrapper[4728]: I1205 12:30:29.626942 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_f030a4aa-1b8c-4889-9385-56c75001c4f5/glance-log/0.log" Dec 05 12:30:29 crc kubenswrapper[4728]: I1205 12:30:29.880372 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-wscr4_c307593d-70fb-42ac-987a-9e7639f530c6/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 12:30:29 crc kubenswrapper[4728]: I1205 12:30:29.936635 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-7755888bd8-shzsv_841ca27f-0486-413e-975b-4f51b008883a/horizon/0.log" Dec 05 12:30:30 crc kubenswrapper[4728]: I1205 12:30:30.162993 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-5jh58_d2794a25-aa06-4146-957e-5438b4005382/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 12:30:30 crc kubenswrapper[4728]: I1205 12:30:30.428545 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29415601-tk5jd_5ab6c45e-d05b-4ddc-92e0-5addedce425d/keystone-cron/0.log" Dec 05 12:30:30 crc kubenswrapper[4728]: I1205 12:30:30.591658 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-7755888bd8-shzsv_841ca27f-0486-413e-975b-4f51b008883a/horizon-log/0.log" Dec 05 12:30:30 crc kubenswrapper[4728]: I1205 12:30:30.640140 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_910efe4a-03b6-4aa7-aa87-d69b832a3db9/kube-state-metrics/0.log" Dec 05 12:30:30 crc kubenswrapper[4728]: I1205 12:30:30.709158 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_23cf88b0-870f-44f4-9f15-aa4b15d86a12/cinder-volume/0.log" Dec 05 12:30:30 crc kubenswrapper[4728]: I1205 12:30:30.868657 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-h792n_21cac74f-ba27-4db1-9cbe-6189f230e514/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 12:30:31 crc kubenswrapper[4728]: I1205 12:30:31.343525 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-scheduler-0_30fe7fc4-13df-437a-8771-c6904804bcb9/probe/0.log" Dec 05 12:30:31 crc kubenswrapper[4728]: I1205 12:30:31.356487 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-api-0_686f83cd-910d-4bf2-977a-8544326152e4/manila-api/0.log" Dec 05 12:30:31 crc kubenswrapper[4728]: I1205 12:30:31.444561 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-scheduler-0_30fe7fc4-13df-437a-8771-c6904804bcb9/manila-scheduler/0.log" Dec 05 12:30:31 crc kubenswrapper[4728]: I1205 12:30:31.666635 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-share-share1-0_e76d402c-8c19-4097-8c06-9bb28018f661/probe/0.log" Dec 05 12:30:31 crc kubenswrapper[4728]: I1205 12:30:31.964044 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-share-share1-0_e76d402c-8c19-4097-8c06-9bb28018f661/manila-share/0.log" 
Dec 05 12:30:32 crc kubenswrapper[4728]: I1205 12:30:32.198229 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-api-0_686f83cd-910d-4bf2-977a-8544326152e4/manila-api-log/0.log" Dec 05 12:30:32 crc kubenswrapper[4728]: I1205 12:30:32.661511 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-clmjj_60e7995e-9ae7-47b3-bd6a-991c444af447/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 12:30:32 crc kubenswrapper[4728]: I1205 12:30:32.929812 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-5cf4cb67d5-pxwtj_9a26b328-f443-4f9a-a2ae-2042e3189096/neutron-httpd/0.log" Dec 05 12:30:33 crc kubenswrapper[4728]: I1205 12:30:33.428269 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-5cf4cb67d5-pxwtj_9a26b328-f443-4f9a-a2ae-2042e3189096/neutron-api/0.log" Dec 05 12:30:34 crc kubenswrapper[4728]: I1205 12:30:34.341100 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_7802cd18-c771-414b-afd5-f6d47c588a58/nova-cell0-conductor-conductor/0.log" Dec 05 12:30:34 crc kubenswrapper[4728]: I1205 12:30:34.632004 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-6fbd7fcb8c-kr5v8_523f920a-f4d7-46db-8066-ad0c4f8d22d5/keystone-api/0.log" Dec 05 12:30:34 crc kubenswrapper[4728]: I1205 12:30:34.932517 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_50d65ab3-36a4-45da-bfbd-b66ff1541c6b/nova-cell1-conductor-conductor/0.log" Dec 05 12:30:35 crc kubenswrapper[4728]: I1205 12:30:35.312535 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_e7183619-beae-465b-86cf-ccbb710d4ac8/nova-api-log/0.log" Dec 05 12:30:35 crc kubenswrapper[4728]: I1205 12:30:35.470279 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_3c17270c-7319-4bc9-af0b-f008615371f9/nova-cell1-novncproxy-novncproxy/0.log" Dec 05 12:30:35 crc kubenswrapper[4728]: I1205 12:30:35.603826 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-bw26p_1284e61e-761e-482e-930f-ba0e75280dd7/nova-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 12:30:35 crc kubenswrapper[4728]: I1205 12:30:35.780497 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_7a68b1f8-9521-44c8-8a8e-5bc26bc28047/nova-metadata-log/0.log" Dec 05 12:30:35 crc kubenswrapper[4728]: I1205 12:30:35.946509 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_e7183619-beae-465b-86cf-ccbb710d4ac8/nova-api-api/0.log" Dec 05 12:30:36 crc kubenswrapper[4728]: I1205 12:30:36.180360 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_d9cada47-64db-4c9c-8598-917e4099a8a6/mysql-bootstrap/0.log" Dec 05 12:30:36 crc kubenswrapper[4728]: I1205 12:30:36.387915 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_d9cada47-64db-4c9c-8598-917e4099a8a6/mysql-bootstrap/0.log" Dec 05 12:30:36 crc kubenswrapper[4728]: I1205 12:30:36.403607 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_d9cada47-64db-4c9c-8598-917e4099a8a6/galera/0.log" Dec 05 12:30:36 crc kubenswrapper[4728]: I1205 12:30:36.548522 4728 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_nova-scheduler-0_aa4cedf1-fd8d-4339-8569-f105adb2ca1a/nova-scheduler-scheduler/0.log" Dec 05 12:30:37 crc kubenswrapper[4728]: I1205 12:30:37.187817 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_08dfc6f9-aba5-4869-bdd3-7e3e33754318/mysql-bootstrap/0.log" Dec 05 12:30:37 crc kubenswrapper[4728]: I1205 12:30:37.360159 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_08dfc6f9-aba5-4869-bdd3-7e3e33754318/mysql-bootstrap/0.log" Dec 05 12:30:37 crc kubenswrapper[4728]: I1205 12:30:37.436176 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_08dfc6f9-aba5-4869-bdd3-7e3e33754318/galera/0.log" Dec 05 12:30:37 crc kubenswrapper[4728]: I1205 12:30:37.722986 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_6860f6fe-8127-4cbd-af2d-7e5e0e4ed001/openstackclient/0.log" Dec 05 12:30:37 crc kubenswrapper[4728]: I1205 12:30:37.822087 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_7a68b1f8-9521-44c8-8a8e-5bc26bc28047/nova-metadata-metadata/0.log" Dec 05 12:30:37 crc kubenswrapper[4728]: I1205 12:30:37.829858 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-74pwl_ed80e7c1-b5a1-4606-b110-5d205dd122b4/ovn-controller/0.log" Dec 05 12:30:38 crc kubenswrapper[4728]: I1205 12:30:38.069661 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-csgtz_a087e318-da4d-49e0-826e-198c5afc0a15/ovsdb-server-init/0.log" Dec 05 12:30:38 crc kubenswrapper[4728]: I1205 12:30:38.112084 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-5shht_7e189ef3-6dab-4ce4-9cc3-b9bd409342ba/openstack-network-exporter/0.log" Dec 05 12:30:38 crc kubenswrapper[4728]: I1205 12:30:38.276721 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-csgtz_a087e318-da4d-49e0-826e-198c5afc0a15/ovs-vswitchd/0.log" Dec 05 12:30:38 crc kubenswrapper[4728]: I1205 12:30:38.281173 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-csgtz_a087e318-da4d-49e0-826e-198c5afc0a15/ovsdb-server-init/0.log" Dec 05 12:30:38 crc kubenswrapper[4728]: I1205 12:30:38.336918 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-csgtz_a087e318-da4d-49e0-826e-198c5afc0a15/ovsdb-server/0.log" Dec 05 12:30:38 crc kubenswrapper[4728]: I1205 12:30:38.551519 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-tszdd_e1b21c39-5973-43f9-a5f5-73f7e3a1f778/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 12:30:38 crc kubenswrapper[4728]: I1205 12:30:38.590894 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_de655c8f-ba39-41bb-a5c0-c3195d4999ea/openstack-network-exporter/0.log" Dec 05 12:30:38 crc kubenswrapper[4728]: I1205 12:30:38.661592 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_de655c8f-ba39-41bb-a5c0-c3195d4999ea/ovn-northd/0.log" Dec 05 12:30:38 crc kubenswrapper[4728]: I1205 12:30:38.760596 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_de9c5f8c-fb05-44a5-804d-1f8f2129da92/openstack-network-exporter/0.log" Dec 05 12:30:38 crc kubenswrapper[4728]: I1205 12:30:38.844577 4728 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ovsdbserver-nb-0_de9c5f8c-fb05-44a5-804d-1f8f2129da92/ovsdbserver-nb/0.log" Dec 05 12:30:39 crc kubenswrapper[4728]: I1205 12:30:39.007786 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_269bbc54-5980-4de2-ac45-d1d7ff6335e9/ovsdbserver-sb/0.log" Dec 05 12:30:39 crc kubenswrapper[4728]: I1205 12:30:39.010488 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_269bbc54-5980-4de2-ac45-d1d7ff6335e9/openstack-network-exporter/0.log" Dec 05 12:30:39 crc kubenswrapper[4728]: I1205 12:30:39.291418 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_297a0136-c048-4d32-ae75-2691e2bb98b4/setup-container/0.log" Dec 05 12:30:39 crc kubenswrapper[4728]: I1205 12:30:39.469564 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_297a0136-c048-4d32-ae75-2691e2bb98b4/setup-container/0.log" Dec 05 12:30:39 crc kubenswrapper[4728]: I1205 12:30:39.513605 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-554ccc7b5b-l2c6v_d7d32022-fd6b-4ecd-83d4-5b628f19e413/placement-api/0.log" Dec 05 12:30:39 crc kubenswrapper[4728]: I1205 12:30:39.539644 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_297a0136-c048-4d32-ae75-2691e2bb98b4/rabbitmq/0.log" Dec 05 12:30:39 crc kubenswrapper[4728]: I1205 12:30:39.730334 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_9f981b7a-7de1-4ce3-ae26-5693c659923d/setup-container/0.log" Dec 05 12:30:39 crc kubenswrapper[4728]: I1205 12:30:39.738127 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-554ccc7b5b-l2c6v_d7d32022-fd6b-4ecd-83d4-5b628f19e413/placement-log/0.log" Dec 05 12:30:39 crc kubenswrapper[4728]: I1205 12:30:39.949134 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_9f981b7a-7de1-4ce3-ae26-5693c659923d/setup-container/0.log" Dec 05 12:30:39 crc kubenswrapper[4728]: I1205 12:30:39.952471 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_9f981b7a-7de1-4ce3-ae26-5693c659923d/rabbitmq/0.log" Dec 05 12:30:40 crc kubenswrapper[4728]: I1205 12:30:40.018665 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-pj5b7_be7ac6c7-643c-42b4-bae5-0eab2ee3aea0/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 12:30:40 crc kubenswrapper[4728]: I1205 12:30:40.128220 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-m6vb7_a97a24e6-7ec7-48ea-8dcb-bc6c72a64f67/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 12:30:40 crc kubenswrapper[4728]: I1205 12:30:40.275240 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-8bbzt_12c3ce02-598e-48b2-b81c-7f80d3589de4/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 12:30:40 crc kubenswrapper[4728]: I1205 12:30:40.396658 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-7jz8f_45fff4a4-1d89-41c0-a166-935f921ad8ec/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 12:30:40 crc kubenswrapper[4728]: I1205 12:30:40.493420 4728 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-p6mdl_63543ad1-6aa4-4b72-aa6d-4438fad98d08/ssh-known-hosts-edpm-deployment/0.log" Dec 05 12:30:40 crc kubenswrapper[4728]: I1205 12:30:40.722326 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-6bf4577867-xpjkr_9e762b75-33c7-464f-a8a9-316b5209b2b3/proxy-server/0.log" Dec 05 12:30:40 crc kubenswrapper[4728]: I1205 12:30:40.822009 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-6bf4577867-xpjkr_9e762b75-33c7-464f-a8a9-316b5209b2b3/proxy-httpd/0.log" Dec 05 12:30:40 crc kubenswrapper[4728]: I1205 12:30:40.827586 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-mwr89_f047ba61-512e-4899-95ec-2dd4a1862858/swift-ring-rebalance/0.log" Dec 05 12:30:40 crc kubenswrapper[4728]: I1205 12:30:40.933362 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a64af6ac-e922-435f-bee9-1cc7e7a95f4a/account-auditor/0.log" Dec 05 12:30:41 crc kubenswrapper[4728]: I1205 12:30:41.050248 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a64af6ac-e922-435f-bee9-1cc7e7a95f4a/account-reaper/0.log" Dec 05 12:30:41 crc kubenswrapper[4728]: I1205 12:30:41.113220 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a64af6ac-e922-435f-bee9-1cc7e7a95f4a/account-replicator/0.log" Dec 05 12:30:41 crc kubenswrapper[4728]: I1205 12:30:41.211846 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a64af6ac-e922-435f-bee9-1cc7e7a95f4a/account-server/0.log" Dec 05 12:30:41 crc kubenswrapper[4728]: I1205 12:30:41.268204 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a64af6ac-e922-435f-bee9-1cc7e7a95f4a/container-auditor/0.log" Dec 05 12:30:41 crc kubenswrapper[4728]: I1205 12:30:41.312007 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a64af6ac-e922-435f-bee9-1cc7e7a95f4a/container-replicator/0.log" Dec 05 12:30:41 crc kubenswrapper[4728]: I1205 12:30:41.387653 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a64af6ac-e922-435f-bee9-1cc7e7a95f4a/container-server/0.log" Dec 05 12:30:41 crc kubenswrapper[4728]: I1205 12:30:41.456993 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a64af6ac-e922-435f-bee9-1cc7e7a95f4a/container-updater/0.log" Dec 05 12:30:41 crc kubenswrapper[4728]: I1205 12:30:41.492365 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a64af6ac-e922-435f-bee9-1cc7e7a95f4a/object-auditor/0.log" Dec 05 12:30:41 crc kubenswrapper[4728]: I1205 12:30:41.563232 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a64af6ac-e922-435f-bee9-1cc7e7a95f4a/object-expirer/0.log" Dec 05 12:30:41 crc kubenswrapper[4728]: I1205 12:30:41.641377 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a64af6ac-e922-435f-bee9-1cc7e7a95f4a/object-server/0.log" Dec 05 12:30:41 crc kubenswrapper[4728]: I1205 12:30:41.669145 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a64af6ac-e922-435f-bee9-1cc7e7a95f4a/object-replicator/0.log" Dec 05 12:30:41 crc kubenswrapper[4728]: I1205 12:30:41.737750 4728 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-storage-0_a64af6ac-e922-435f-bee9-1cc7e7a95f4a/object-updater/0.log" Dec 05 12:30:41 crc kubenswrapper[4728]: I1205 12:30:41.823530 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a64af6ac-e922-435f-bee9-1cc7e7a95f4a/rsync/0.log" Dec 05 12:30:41 crc kubenswrapper[4728]: I1205 12:30:41.862985 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a64af6ac-e922-435f-bee9-1cc7e7a95f4a/swift-recon-cron/0.log" Dec 05 12:30:42 crc kubenswrapper[4728]: I1205 12:30:42.056120 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-mb8fg_08a82141-a6d5-4c68-9adb-9c4158a6c7c2/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 12:30:42 crc kubenswrapper[4728]: I1205 12:30:42.231616 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_b71aa6bd-22ea-4144-84ea-a241546286a2/tempest-tests-tempest-tests-runner/0.log" Dec 05 12:30:42 crc kubenswrapper[4728]: I1205 12:30:42.285050 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_cbc422a5-ef18-4b9d-a3a4-c783d200dc25/test-operator-logs-container/0.log" Dec 05 12:30:42 crc kubenswrapper[4728]: I1205 12:30:42.437328 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-w2kxm_8ea93fe3-4992-4d62-b2c2-f67ca4763c75/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 12:30:54 crc kubenswrapper[4728]: I1205 12:30:54.523404 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_efbf5688-8330-4166-a93b-03dcf8ed578d/memcached/0.log" Dec 05 12:30:55 crc kubenswrapper[4728]: I1205 12:30:55.702037 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:30:55 crc kubenswrapper[4728]: I1205 12:30:55.702363 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:31:02 crc kubenswrapper[4728]: I1205 12:31:02.661668 4728 scope.go:117] "RemoveContainer" containerID="c312adeed26f1b7a5ea1007299ad9183cbd000672b0d1db5f804d99a0ef7b49c" Dec 05 12:31:10 crc kubenswrapper[4728]: I1205 12:31:10.789080 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-btnkh_5d689dc0-c7c8-4af2-8f4c-45863ab88b69/kube-rbac-proxy/0.log" Dec 05 12:31:10 crc kubenswrapper[4728]: I1205 12:31:10.865243 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-btnkh_5d689dc0-c7c8-4af2-8f4c-45863ab88b69/manager/0.log" Dec 05 12:31:10 crc kubenswrapper[4728]: I1205 12:31:10.979683 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-zqc5j_941ddd04-049e-4247-98c2-6ef2117c2c69/kube-rbac-proxy/0.log" Dec 05 12:31:11 crc kubenswrapper[4728]: I1205 
12:31:11.021329 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-zqc5j_941ddd04-049e-4247-98c2-6ef2117c2c69/manager/0.log" Dec 05 12:31:11 crc kubenswrapper[4728]: I1205 12:31:11.170307 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-dnlfm_df0f8091-3107-4a49-9672-8332e4c1f8c0/kube-rbac-proxy/0.log" Dec 05 12:31:11 crc kubenswrapper[4728]: I1205 12:31:11.244773 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-dnlfm_df0f8091-3107-4a49-9672-8332e4c1f8c0/manager/0.log" Dec 05 12:31:11 crc kubenswrapper[4728]: I1205 12:31:11.274635 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e48d19870fbb270f90e07dce4d9bad603b7d4900857291efd849a97a2e5tx2p_e0dc12b8-8d15-4ef7-a8f9-985442001a82/util/0.log" Dec 05 12:31:11 crc kubenswrapper[4728]: I1205 12:31:11.460690 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e48d19870fbb270f90e07dce4d9bad603b7d4900857291efd849a97a2e5tx2p_e0dc12b8-8d15-4ef7-a8f9-985442001a82/util/0.log" Dec 05 12:31:11 crc kubenswrapper[4728]: I1205 12:31:11.462259 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e48d19870fbb270f90e07dce4d9bad603b7d4900857291efd849a97a2e5tx2p_e0dc12b8-8d15-4ef7-a8f9-985442001a82/pull/0.log" Dec 05 12:31:11 crc kubenswrapper[4728]: I1205 12:31:11.470138 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e48d19870fbb270f90e07dce4d9bad603b7d4900857291efd849a97a2e5tx2p_e0dc12b8-8d15-4ef7-a8f9-985442001a82/pull/0.log" Dec 05 12:31:11 crc kubenswrapper[4728]: I1205 12:31:11.619140 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e48d19870fbb270f90e07dce4d9bad603b7d4900857291efd849a97a2e5tx2p_e0dc12b8-8d15-4ef7-a8f9-985442001a82/util/0.log" Dec 05 12:31:11 crc kubenswrapper[4728]: I1205 12:31:11.635600 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e48d19870fbb270f90e07dce4d9bad603b7d4900857291efd849a97a2e5tx2p_e0dc12b8-8d15-4ef7-a8f9-985442001a82/pull/0.log" Dec 05 12:31:11 crc kubenswrapper[4728]: I1205 12:31:11.655537 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e48d19870fbb270f90e07dce4d9bad603b7d4900857291efd849a97a2e5tx2p_e0dc12b8-8d15-4ef7-a8f9-985442001a82/extract/0.log" Dec 05 12:31:11 crc kubenswrapper[4728]: I1205 12:31:11.846591 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-nch5j_3149306c-d64a-4bdf-994a-ecec0489e472/kube-rbac-proxy/0.log" Dec 05 12:31:11 crc kubenswrapper[4728]: I1205 12:31:11.874920 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-ghbcj_403718e0-87fa-402a-844e-6b458a15b003/kube-rbac-proxy/0.log" Dec 05 12:31:12 crc kubenswrapper[4728]: I1205 12:31:12.036802 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-nch5j_3149306c-d64a-4bdf-994a-ecec0489e472/manager/0.log" Dec 05 12:31:12 crc kubenswrapper[4728]: I1205 12:31:12.050389 4728 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-ghbcj_403718e0-87fa-402a-844e-6b458a15b003/manager/0.log" Dec 05 12:31:12 crc kubenswrapper[4728]: I1205 12:31:12.112668 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-2zvrw_34f5a6c5-a316-450d-83a1-affbdd4d2e0e/kube-rbac-proxy/0.log" Dec 05 12:31:12 crc kubenswrapper[4728]: I1205 12:31:12.220245 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-2zvrw_34f5a6c5-a316-450d-83a1-affbdd4d2e0e/manager/0.log" Dec 05 12:31:12 crc kubenswrapper[4728]: I1205 12:31:12.290552 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-c2s6r_a1c012ce-e23c-4235-b2b2-56306e3d4722/kube-rbac-proxy/0.log" Dec 05 12:31:12 crc kubenswrapper[4728]: I1205 12:31:12.475884 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-c2s6r_a1c012ce-e23c-4235-b2b2-56306e3d4722/manager/0.log" Dec 05 12:31:12 crc kubenswrapper[4728]: I1205 12:31:12.492443 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-7jg64_03a7d3e9-4e85-496e-963f-f0c1e7e4cf04/manager/0.log" Dec 05 12:31:12 crc kubenswrapper[4728]: I1205 12:31:12.506327 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-7jg64_03a7d3e9-4e85-496e-963f-f0c1e7e4cf04/kube-rbac-proxy/0.log" Dec 05 12:31:12 crc kubenswrapper[4728]: I1205 12:31:12.704707 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-6wcpq_6f5ec4c9-95e8-43ea-a137-9c781e4f234f/kube-rbac-proxy/0.log" Dec 05 12:31:12 crc kubenswrapper[4728]: I1205 12:31:12.746634 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-6wcpq_6f5ec4c9-95e8-43ea-a137-9c781e4f234f/manager/0.log" Dec 05 12:31:12 crc kubenswrapper[4728]: I1205 12:31:12.910763 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-n8x6r_d21af02e-d731-402f-aa09-1f705dc4e82b/kube-rbac-proxy/0.log" Dec 05 12:31:12 crc kubenswrapper[4728]: I1205 12:31:12.947494 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-n8x6r_d21af02e-d731-402f-aa09-1f705dc4e82b/manager/0.log" Dec 05 12:31:12 crc kubenswrapper[4728]: I1205 12:31:12.987664 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-xjwhb_ce9ea80b-ff4e-49a8-a1b6-7f7ea21dbf10/kube-rbac-proxy/0.log" Dec 05 12:31:13 crc kubenswrapper[4728]: I1205 12:31:13.124290 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-xjwhb_ce9ea80b-ff4e-49a8-a1b6-7f7ea21dbf10/manager/0.log" Dec 05 12:31:13 crc kubenswrapper[4728]: I1205 12:31:13.137587 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-sdqtl_98cd0df5-f4a5-4515-80b5-d0ac625a527a/kube-rbac-proxy/0.log" Dec 05 12:31:13 crc kubenswrapper[4728]: I1205 12:31:13.235089 4728 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-sdqtl_98cd0df5-f4a5-4515-80b5-d0ac625a527a/manager/0.log" Dec 05 12:31:13 crc kubenswrapper[4728]: I1205 12:31:13.334002 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-8fvcf_45a6cdaf-b12d-4f86-b6ce-7761cfd5aee6/kube-rbac-proxy/0.log" Dec 05 12:31:13 crc kubenswrapper[4728]: I1205 12:31:13.385273 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-8fvcf_45a6cdaf-b12d-4f86-b6ce-7761cfd5aee6/manager/0.log" Dec 05 12:31:14 crc kubenswrapper[4728]: I1205 12:31:14.004648 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-t7lpc_854e2a55-450f-48e7-93fb-fca327f4fd18/kube-rbac-proxy/0.log" Dec 05 12:31:14 crc kubenswrapper[4728]: I1205 12:31:14.136387 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4wlcvm_6ee43140-9d2f-42c8-917f-eaa028a8e1b1/kube-rbac-proxy/0.log" Dec 05 12:31:14 crc kubenswrapper[4728]: I1205 12:31:14.143933 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-t7lpc_854e2a55-450f-48e7-93fb-fca327f4fd18/manager/0.log" Dec 05 12:31:14 crc kubenswrapper[4728]: I1205 12:31:14.413580 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4wlcvm_6ee43140-9d2f-42c8-917f-eaa028a8e1b1/manager/0.log" Dec 05 12:31:14 crc kubenswrapper[4728]: I1205 12:31:14.667986 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-lvhsr_bcbc2f72-6b48-4afd-9b1a-cd9d4a32692d/registry-server/0.log" Dec 05 12:31:14 crc kubenswrapper[4728]: I1205 12:31:14.774096 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-6767b55986-t74w7_226ccaf4-1c8b-4a98-a3a6-122629462baa/operator/0.log" Dec 05 12:31:14 crc kubenswrapper[4728]: I1205 12:31:14.868082 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-pqmth_b5925d20-e10a-4564-91f4-67acb55b2a01/kube-rbac-proxy/0.log" Dec 05 12:31:15 crc kubenswrapper[4728]: I1205 12:31:15.029678 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-xnq92_dd2fed26-0e5c-49e0-ad15-3936a13680e7/kube-rbac-proxy/0.log" Dec 05 12:31:15 crc kubenswrapper[4728]: I1205 12:31:15.055076 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-pqmth_b5925d20-e10a-4564-91f4-67acb55b2a01/manager/0.log" Dec 05 12:31:15 crc kubenswrapper[4728]: I1205 12:31:15.137262 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-xnq92_dd2fed26-0e5c-49e0-ad15-3936a13680e7/manager/0.log" Dec 05 12:31:15 crc kubenswrapper[4728]: I1205 12:31:15.513912 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-64b69b8785-cvs4m_891e8e93-da9a-4b87-8e69-04fe149274cd/manager/0.log" Dec 05 12:31:15 crc kubenswrapper[4728]: I1205 12:31:15.656452 
4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-sb7lq_04a349f4-b388-4a9c-8dbc-54bd1fb46934/operator/0.log" Dec 05 12:31:15 crc kubenswrapper[4728]: I1205 12:31:15.758272 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-h6czw_db8744a4-edde-4a54-85e9-05089f650ba0/kube-rbac-proxy/0.log" Dec 05 12:31:15 crc kubenswrapper[4728]: I1205 12:31:15.793737 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-h6czw_db8744a4-edde-4a54-85e9-05089f650ba0/manager/0.log" Dec 05 12:31:15 crc kubenswrapper[4728]: I1205 12:31:15.932274 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-gbdj9_c16435ec-544a-4d19-8667-925c045ecf61/kube-rbac-proxy/0.log" Dec 05 12:31:15 crc kubenswrapper[4728]: I1205 12:31:15.946272 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-gbdj9_c16435ec-544a-4d19-8667-925c045ecf61/manager/0.log" Dec 05 12:31:16 crc kubenswrapper[4728]: I1205 12:31:16.010880 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-sq989_5d7b3b1e-b8a3-4477-8d37-cfd6db1cf27d/kube-rbac-proxy/0.log" Dec 05 12:31:16 crc kubenswrapper[4728]: I1205 12:31:16.076541 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-sq989_5d7b3b1e-b8a3-4477-8d37-cfd6db1cf27d/manager/0.log" Dec 05 12:31:16 crc kubenswrapper[4728]: I1205 12:31:16.188930 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-tc4ks_a5b101e4-a4f7-4c73-8327-e09cce07eb51/kube-rbac-proxy/0.log" Dec 05 12:31:16 crc kubenswrapper[4728]: I1205 12:31:16.189669 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-tc4ks_a5b101e4-a4f7-4c73-8327-e09cce07eb51/manager/0.log" Dec 05 12:31:25 crc kubenswrapper[4728]: I1205 12:31:25.701541 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:31:25 crc kubenswrapper[4728]: I1205 12:31:25.702061 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:31:36 crc kubenswrapper[4728]: I1205 12:31:36.568165 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-5x4xq_99f17d34-cfff-4706-af23-04fff3d500bd/control-plane-machine-set-operator/0.log" Dec 05 12:31:36 crc kubenswrapper[4728]: I1205 12:31:36.772151 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-vllpv_f4d671a2-6454-4bf6-a099-0c0e15de2f20/machine-api-operator/0.log" Dec 05 12:31:36 crc kubenswrapper[4728]: I1205 
12:31:36.775592 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-vllpv_f4d671a2-6454-4bf6-a099-0c0e15de2f20/kube-rbac-proxy/0.log" Dec 05 12:31:49 crc kubenswrapper[4728]: I1205 12:31:49.613875 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-hhc56_159ff938-2eac-4774-beeb-18122124ceef/cert-manager-controller/0.log" Dec 05 12:31:49 crc kubenswrapper[4728]: I1205 12:31:49.831084 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-zxdgf_21cf202a-ede4-4ba9-9180-2dcde628cd09/cert-manager-cainjector/0.log" Dec 05 12:31:49 crc kubenswrapper[4728]: I1205 12:31:49.846001 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-fb8dg_2cf07d8e-ad99-47a1-b4b7-0b37e78f81f0/cert-manager-webhook/0.log" Dec 05 12:31:55 crc kubenswrapper[4728]: I1205 12:31:55.701858 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:31:55 crc kubenswrapper[4728]: I1205 12:31:55.702446 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:31:55 crc kubenswrapper[4728]: I1205 12:31:55.702499 4728 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" Dec 05 12:31:55 crc kubenswrapper[4728]: I1205 12:31:55.703401 4728 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a0bc77d6c0813c6450a27dc8cefc7322664efcc392c3072ff03b382186801ae2"} pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 12:31:55 crc kubenswrapper[4728]: I1205 12:31:55.703459 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" containerID="cri-o://a0bc77d6c0813c6450a27dc8cefc7322664efcc392c3072ff03b382186801ae2" gracePeriod=600 Dec 05 12:31:56 crc kubenswrapper[4728]: I1205 12:31:56.295713 4728 generic.go:334] "Generic (PLEG): container finished" podID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerID="a0bc77d6c0813c6450a27dc8cefc7322664efcc392c3072ff03b382186801ae2" exitCode=0 Dec 05 12:31:56 crc kubenswrapper[4728]: I1205 12:31:56.296293 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" event={"ID":"95bfa60b-fcb6-4519-abc5-c25fea50921d","Type":"ContainerDied","Data":"a0bc77d6c0813c6450a27dc8cefc7322664efcc392c3072ff03b382186801ae2"} Dec 05 12:31:56 crc kubenswrapper[4728]: I1205 12:31:56.296322 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" 
event={"ID":"95bfa60b-fcb6-4519-abc5-c25fea50921d","Type":"ContainerStarted","Data":"ae8200e36b953d8a7b1f348f59b89f13d9c5c73ea3665ac570f46d3131918f69"} Dec 05 12:31:56 crc kubenswrapper[4728]: I1205 12:31:56.296340 4728 scope.go:117] "RemoveContainer" containerID="5a692b79ac4be1cd2916070664295474f23e3887e4afde68f6aa8d5465853c63" Dec 05 12:32:04 crc kubenswrapper[4728]: I1205 12:32:04.215439 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-gxx2h_f97e7e26-99e0-403f-a6d5-5aa008101459/nmstate-console-plugin/0.log" Dec 05 12:32:04 crc kubenswrapper[4728]: I1205 12:32:04.437011 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-x9g4q_522d4b95-dda7-40b8-960e-f19f1b147c41/nmstate-handler/0.log" Dec 05 12:32:04 crc kubenswrapper[4728]: I1205 12:32:04.479161 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-6jmjp_c560c0c5-c5bd-41b6-a77c-b8ff9452b7e6/kube-rbac-proxy/0.log" Dec 05 12:32:04 crc kubenswrapper[4728]: I1205 12:32:04.517111 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-6jmjp_c560c0c5-c5bd-41b6-a77c-b8ff9452b7e6/nmstate-metrics/0.log" Dec 05 12:32:04 crc kubenswrapper[4728]: I1205 12:32:04.691529 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-5rg82_2ce6b79a-c293-472b-90f8-7b56ce77b4cf/nmstate-operator/0.log" Dec 05 12:32:04 crc kubenswrapper[4728]: I1205 12:32:04.746700 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-7xz5t_7c2cda78-4bb1-416c-8762-8c1618a755ad/nmstate-webhook/0.log" Dec 05 12:32:19 crc kubenswrapper[4728]: I1205 12:32:19.788567 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-2frdd_09963a34-a1db-4854-8a6a-475da8222a7b/kube-rbac-proxy/0.log" Dec 05 12:32:19 crc kubenswrapper[4728]: I1205 12:32:19.893907 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-2frdd_09963a34-a1db-4854-8a6a-475da8222a7b/controller/0.log" Dec 05 12:32:20 crc kubenswrapper[4728]: I1205 12:32:20.380857 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fpsrk_cc6442af-701e-429c-9fe5-93dbe8884f45/cp-frr-files/0.log" Dec 05 12:32:20 crc kubenswrapper[4728]: I1205 12:32:20.566211 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fpsrk_cc6442af-701e-429c-9fe5-93dbe8884f45/cp-reloader/0.log" Dec 05 12:32:20 crc kubenswrapper[4728]: I1205 12:32:20.579291 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fpsrk_cc6442af-701e-429c-9fe5-93dbe8884f45/cp-reloader/0.log" Dec 05 12:32:20 crc kubenswrapper[4728]: I1205 12:32:20.591940 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fpsrk_cc6442af-701e-429c-9fe5-93dbe8884f45/cp-metrics/0.log" Dec 05 12:32:20 crc kubenswrapper[4728]: I1205 12:32:20.620637 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fpsrk_cc6442af-701e-429c-9fe5-93dbe8884f45/cp-frr-files/0.log" Dec 05 12:32:20 crc kubenswrapper[4728]: I1205 12:32:20.805361 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fpsrk_cc6442af-701e-429c-9fe5-93dbe8884f45/cp-reloader/0.log" Dec 05 12:32:20 crc kubenswrapper[4728]: I1205 12:32:20.812377 
4728 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fpsrk_cc6442af-701e-429c-9fe5-93dbe8884f45/cp-frr-files/0.log" Dec 05 12:32:20 crc kubenswrapper[4728]: I1205 12:32:20.812755 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fpsrk_cc6442af-701e-429c-9fe5-93dbe8884f45/cp-metrics/0.log" Dec 05 12:32:20 crc kubenswrapper[4728]: I1205 12:32:20.859101 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fpsrk_cc6442af-701e-429c-9fe5-93dbe8884f45/cp-metrics/0.log" Dec 05 12:32:21 crc kubenswrapper[4728]: I1205 12:32:21.002214 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fpsrk_cc6442af-701e-429c-9fe5-93dbe8884f45/cp-reloader/0.log" Dec 05 12:32:21 crc kubenswrapper[4728]: I1205 12:32:21.008240 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fpsrk_cc6442af-701e-429c-9fe5-93dbe8884f45/cp-frr-files/0.log" Dec 05 12:32:21 crc kubenswrapper[4728]: I1205 12:32:21.013131 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fpsrk_cc6442af-701e-429c-9fe5-93dbe8884f45/cp-metrics/0.log" Dec 05 12:32:21 crc kubenswrapper[4728]: I1205 12:32:21.089766 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fpsrk_cc6442af-701e-429c-9fe5-93dbe8884f45/controller/0.log" Dec 05 12:32:21 crc kubenswrapper[4728]: I1205 12:32:21.224599 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fpsrk_cc6442af-701e-429c-9fe5-93dbe8884f45/frr-metrics/0.log" Dec 05 12:32:21 crc kubenswrapper[4728]: I1205 12:32:21.241476 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fpsrk_cc6442af-701e-429c-9fe5-93dbe8884f45/kube-rbac-proxy/0.log" Dec 05 12:32:21 crc kubenswrapper[4728]: I1205 12:32:21.318119 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fpsrk_cc6442af-701e-429c-9fe5-93dbe8884f45/kube-rbac-proxy-frr/0.log" Dec 05 12:32:21 crc kubenswrapper[4728]: I1205 12:32:21.515976 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fpsrk_cc6442af-701e-429c-9fe5-93dbe8884f45/reloader/0.log" Dec 05 12:32:21 crc kubenswrapper[4728]: I1205 12:32:21.560556 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-9m6g5_b85fcb5b-5696-42c4-bb19-1e0d5fa8ff06/frr-k8s-webhook-server/0.log" Dec 05 12:32:21 crc kubenswrapper[4728]: I1205 12:32:21.780974 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-dc46c65cb-kfn26_1f9a485b-4186-4184-9f2a-81a4b74105d9/manager/0.log" Dec 05 12:32:21 crc kubenswrapper[4728]: I1205 12:32:21.966274 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-585ddd7f97-8nw7b_e085b90a-7d0a-4027-bf16-477076627681/webhook-server/0.log" Dec 05 12:32:22 crc kubenswrapper[4728]: I1205 12:32:22.051457 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-7wkzn_ee5be811-5e9e-4a19-955b-944a9a457060/kube-rbac-proxy/0.log" Dec 05 12:32:22 crc kubenswrapper[4728]: I1205 12:32:22.853842 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-7wkzn_ee5be811-5e9e-4a19-955b-944a9a457060/speaker/0.log" Dec 05 12:32:22 crc kubenswrapper[4728]: I1205 12:32:22.869414 4728 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-fpsrk_cc6442af-701e-429c-9fe5-93dbe8884f45/frr/0.log" Dec 05 12:32:36 crc kubenswrapper[4728]: I1205 12:32:36.346682 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7x7pw_5ce9b03b-d5f8-4249-80c3-8e637c09bf8e/util/0.log" Dec 05 12:32:36 crc kubenswrapper[4728]: I1205 12:32:36.482363 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7x7pw_5ce9b03b-d5f8-4249-80c3-8e637c09bf8e/util/0.log" Dec 05 12:32:36 crc kubenswrapper[4728]: I1205 12:32:36.482416 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7x7pw_5ce9b03b-d5f8-4249-80c3-8e637c09bf8e/pull/0.log" Dec 05 12:32:36 crc kubenswrapper[4728]: I1205 12:32:36.510295 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7x7pw_5ce9b03b-d5f8-4249-80c3-8e637c09bf8e/pull/0.log" Dec 05 12:32:36 crc kubenswrapper[4728]: I1205 12:32:36.695715 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7x7pw_5ce9b03b-d5f8-4249-80c3-8e637c09bf8e/util/0.log" Dec 05 12:32:36 crc kubenswrapper[4728]: I1205 12:32:36.709540 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7x7pw_5ce9b03b-d5f8-4249-80c3-8e637c09bf8e/pull/0.log" Dec 05 12:32:36 crc kubenswrapper[4728]: I1205 12:32:36.755369 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7x7pw_5ce9b03b-d5f8-4249-80c3-8e637c09bf8e/extract/0.log" Dec 05 12:32:36 crc kubenswrapper[4728]: I1205 12:32:36.878118 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83hnsxg_dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f/util/0.log" Dec 05 12:32:37 crc kubenswrapper[4728]: I1205 12:32:37.465179 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83hnsxg_dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f/util/0.log" Dec 05 12:32:37 crc kubenswrapper[4728]: I1205 12:32:37.472170 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83hnsxg_dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f/pull/0.log" Dec 05 12:32:37 crc kubenswrapper[4728]: I1205 12:32:37.482780 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83hnsxg_dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f/pull/0.log" Dec 05 12:32:37 crc kubenswrapper[4728]: I1205 12:32:37.676062 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83hnsxg_dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f/util/0.log" Dec 05 12:32:37 crc kubenswrapper[4728]: I1205 12:32:37.698976 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83hnsxg_dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f/pull/0.log" Dec 05 12:32:37 crc kubenswrapper[4728]: I1205 
12:32:37.709226 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83hnsxg_dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f/extract/0.log" Dec 05 12:32:37 crc kubenswrapper[4728]: I1205 12:32:37.962695 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-tvtdk_ad6f43a7-1af1-48d1-802f-a2c36bab80cd/extract-utilities/0.log" Dec 05 12:32:38 crc kubenswrapper[4728]: I1205 12:32:38.153255 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-tvtdk_ad6f43a7-1af1-48d1-802f-a2c36bab80cd/extract-utilities/0.log" Dec 05 12:32:38 crc kubenswrapper[4728]: I1205 12:32:38.159086 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-tvtdk_ad6f43a7-1af1-48d1-802f-a2c36bab80cd/extract-content/0.log" Dec 05 12:32:38 crc kubenswrapper[4728]: I1205 12:32:38.168938 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-tvtdk_ad6f43a7-1af1-48d1-802f-a2c36bab80cd/extract-content/0.log" Dec 05 12:32:38 crc kubenswrapper[4728]: I1205 12:32:38.334398 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-tvtdk_ad6f43a7-1af1-48d1-802f-a2c36bab80cd/extract-content/0.log" Dec 05 12:32:38 crc kubenswrapper[4728]: I1205 12:32:38.419586 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-tvtdk_ad6f43a7-1af1-48d1-802f-a2c36bab80cd/extract-utilities/0.log" Dec 05 12:32:38 crc kubenswrapper[4728]: I1205 12:32:38.553244 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-s5vrv_f5e6b28c-ad1c-4ae0-a757-ab7cd65b94e7/extract-utilities/0.log" Dec 05 12:32:39 crc kubenswrapper[4728]: I1205 12:32:39.153473 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-tvtdk_ad6f43a7-1af1-48d1-802f-a2c36bab80cd/registry-server/0.log" Dec 05 12:32:39 crc kubenswrapper[4728]: I1205 12:32:39.272417 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-s5vrv_f5e6b28c-ad1c-4ae0-a757-ab7cd65b94e7/extract-utilities/0.log" Dec 05 12:32:39 crc kubenswrapper[4728]: I1205 12:32:39.332529 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-s5vrv_f5e6b28c-ad1c-4ae0-a757-ab7cd65b94e7/extract-content/0.log" Dec 05 12:32:39 crc kubenswrapper[4728]: I1205 12:32:39.341578 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-s5vrv_f5e6b28c-ad1c-4ae0-a757-ab7cd65b94e7/extract-content/0.log" Dec 05 12:32:39 crc kubenswrapper[4728]: I1205 12:32:39.506720 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-s5vrv_f5e6b28c-ad1c-4ae0-a757-ab7cd65b94e7/extract-utilities/0.log" Dec 05 12:32:39 crc kubenswrapper[4728]: I1205 12:32:39.510195 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-s5vrv_f5e6b28c-ad1c-4ae0-a757-ab7cd65b94e7/extract-content/0.log" Dec 05 12:32:39 crc kubenswrapper[4728]: I1205 12:32:39.725708 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-nxkrc_416c984c-f4c2-4b3f-8dd6-c27724ac7c42/marketplace-operator/0.log" Dec 05 12:32:39 
crc kubenswrapper[4728]: I1205 12:32:39.886582 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-9sklm_4f2ed4d0-156e-4dca-ad81-dc56dbb3b8c2/extract-utilities/0.log" Dec 05 12:32:40 crc kubenswrapper[4728]: I1205 12:32:40.010938 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-9sklm_4f2ed4d0-156e-4dca-ad81-dc56dbb3b8c2/extract-utilities/0.log" Dec 05 12:32:40 crc kubenswrapper[4728]: I1205 12:32:40.055790 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-9sklm_4f2ed4d0-156e-4dca-ad81-dc56dbb3b8c2/extract-content/0.log" Dec 05 12:32:40 crc kubenswrapper[4728]: I1205 12:32:40.091719 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-9sklm_4f2ed4d0-156e-4dca-ad81-dc56dbb3b8c2/extract-content/0.log" Dec 05 12:32:40 crc kubenswrapper[4728]: I1205 12:32:40.133981 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-s5vrv_f5e6b28c-ad1c-4ae0-a757-ab7cd65b94e7/registry-server/0.log" Dec 05 12:32:40 crc kubenswrapper[4728]: I1205 12:32:40.230539 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-9sklm_4f2ed4d0-156e-4dca-ad81-dc56dbb3b8c2/extract-utilities/0.log" Dec 05 12:32:40 crc kubenswrapper[4728]: I1205 12:32:40.310224 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-9sklm_4f2ed4d0-156e-4dca-ad81-dc56dbb3b8c2/extract-content/0.log" Dec 05 12:32:40 crc kubenswrapper[4728]: I1205 12:32:40.327054 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-hvhhm_bf6fb737-e78f-496c-9ae3-5067c4300f62/extract-utilities/0.log" Dec 05 12:32:40 crc kubenswrapper[4728]: I1205 12:32:40.360682 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-9sklm_4f2ed4d0-156e-4dca-ad81-dc56dbb3b8c2/registry-server/0.log" Dec 05 12:32:40 crc kubenswrapper[4728]: I1205 12:32:40.519969 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-hvhhm_bf6fb737-e78f-496c-9ae3-5067c4300f62/extract-content/0.log" Dec 05 12:32:40 crc kubenswrapper[4728]: I1205 12:32:40.534279 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-hvhhm_bf6fb737-e78f-496c-9ae3-5067c4300f62/extract-utilities/0.log" Dec 05 12:32:40 crc kubenswrapper[4728]: I1205 12:32:40.549942 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-hvhhm_bf6fb737-e78f-496c-9ae3-5067c4300f62/extract-content/0.log" Dec 05 12:32:40 crc kubenswrapper[4728]: I1205 12:32:40.720726 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-hvhhm_bf6fb737-e78f-496c-9ae3-5067c4300f62/extract-utilities/0.log" Dec 05 12:32:40 crc kubenswrapper[4728]: I1205 12:32:40.730059 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-hvhhm_bf6fb737-e78f-496c-9ae3-5067c4300f62/extract-content/0.log" Dec 05 12:32:41 crc kubenswrapper[4728]: I1205 12:32:41.364510 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-hvhhm_bf6fb737-e78f-496c-9ae3-5067c4300f62/registry-server/0.log" Dec 05 12:33:31 crc kubenswrapper[4728]: I1205 
12:33:31.115760 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-nd8g2"] Dec 05 12:33:31 crc kubenswrapper[4728]: E1205 12:33:31.117161 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7cd338a-317e-4b37-a602-3b8bf564dcb8" containerName="container-00" Dec 05 12:33:31 crc kubenswrapper[4728]: I1205 12:33:31.117184 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7cd338a-317e-4b37-a602-3b8bf564dcb8" containerName="container-00" Dec 05 12:33:31 crc kubenswrapper[4728]: I1205 12:33:31.117617 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7cd338a-317e-4b37-a602-3b8bf564dcb8" containerName="container-00" Dec 05 12:33:31 crc kubenswrapper[4728]: I1205 12:33:31.123039 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nd8g2" Dec 05 12:33:31 crc kubenswrapper[4728]: I1205 12:33:31.149207 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nd8g2"] Dec 05 12:33:31 crc kubenswrapper[4728]: I1205 12:33:31.272626 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0dcfaf1-4d00-4149-b16e-482d11735490-utilities\") pod \"certified-operators-nd8g2\" (UID: \"f0dcfaf1-4d00-4149-b16e-482d11735490\") " pod="openshift-marketplace/certified-operators-nd8g2" Dec 05 12:33:31 crc kubenswrapper[4728]: I1205 12:33:31.272729 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z5mhc\" (UniqueName: \"kubernetes.io/projected/f0dcfaf1-4d00-4149-b16e-482d11735490-kube-api-access-z5mhc\") pod \"certified-operators-nd8g2\" (UID: \"f0dcfaf1-4d00-4149-b16e-482d11735490\") " pod="openshift-marketplace/certified-operators-nd8g2" Dec 05 12:33:31 crc kubenswrapper[4728]: I1205 12:33:31.272775 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0dcfaf1-4d00-4149-b16e-482d11735490-catalog-content\") pod \"certified-operators-nd8g2\" (UID: \"f0dcfaf1-4d00-4149-b16e-482d11735490\") " pod="openshift-marketplace/certified-operators-nd8g2" Dec 05 12:33:31 crc kubenswrapper[4728]: I1205 12:33:31.375191 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0dcfaf1-4d00-4149-b16e-482d11735490-catalog-content\") pod \"certified-operators-nd8g2\" (UID: \"f0dcfaf1-4d00-4149-b16e-482d11735490\") " pod="openshift-marketplace/certified-operators-nd8g2" Dec 05 12:33:31 crc kubenswrapper[4728]: I1205 12:33:31.375445 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0dcfaf1-4d00-4149-b16e-482d11735490-utilities\") pod \"certified-operators-nd8g2\" (UID: \"f0dcfaf1-4d00-4149-b16e-482d11735490\") " pod="openshift-marketplace/certified-operators-nd8g2" Dec 05 12:33:31 crc kubenswrapper[4728]: I1205 12:33:31.375548 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z5mhc\" (UniqueName: \"kubernetes.io/projected/f0dcfaf1-4d00-4149-b16e-482d11735490-kube-api-access-z5mhc\") pod \"certified-operators-nd8g2\" (UID: \"f0dcfaf1-4d00-4149-b16e-482d11735490\") " pod="openshift-marketplace/certified-operators-nd8g2" Dec 05 12:33:31 crc 
kubenswrapper[4728]: I1205 12:33:31.376687 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0dcfaf1-4d00-4149-b16e-482d11735490-catalog-content\") pod \"certified-operators-nd8g2\" (UID: \"f0dcfaf1-4d00-4149-b16e-482d11735490\") " pod="openshift-marketplace/certified-operators-nd8g2" Dec 05 12:33:31 crc kubenswrapper[4728]: I1205 12:33:31.377107 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0dcfaf1-4d00-4149-b16e-482d11735490-utilities\") pod \"certified-operators-nd8g2\" (UID: \"f0dcfaf1-4d00-4149-b16e-482d11735490\") " pod="openshift-marketplace/certified-operators-nd8g2" Dec 05 12:33:31 crc kubenswrapper[4728]: I1205 12:33:31.396556 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z5mhc\" (UniqueName: \"kubernetes.io/projected/f0dcfaf1-4d00-4149-b16e-482d11735490-kube-api-access-z5mhc\") pod \"certified-operators-nd8g2\" (UID: \"f0dcfaf1-4d00-4149-b16e-482d11735490\") " pod="openshift-marketplace/certified-operators-nd8g2" Dec 05 12:33:31 crc kubenswrapper[4728]: I1205 12:33:31.459401 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nd8g2" Dec 05 12:33:31 crc kubenswrapper[4728]: I1205 12:33:31.980868 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nd8g2"] Dec 05 12:33:32 crc kubenswrapper[4728]: I1205 12:33:32.135559 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nd8g2" event={"ID":"f0dcfaf1-4d00-4149-b16e-482d11735490","Type":"ContainerStarted","Data":"e1a012430d8140c4cb8bd276425ef710fd4d3f8bbc40f4d876f140efe30639d3"} Dec 05 12:33:33 crc kubenswrapper[4728]: I1205 12:33:33.144887 4728 generic.go:334] "Generic (PLEG): container finished" podID="f0dcfaf1-4d00-4149-b16e-482d11735490" containerID="c456cb3b152ea0799b268860d7251d037c5683776de9ad8962c6778c0ec00b16" exitCode=0 Dec 05 12:33:33 crc kubenswrapper[4728]: I1205 12:33:33.145131 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nd8g2" event={"ID":"f0dcfaf1-4d00-4149-b16e-482d11735490","Type":"ContainerDied","Data":"c456cb3b152ea0799b268860d7251d037c5683776de9ad8962c6778c0ec00b16"} Dec 05 12:33:33 crc kubenswrapper[4728]: I1205 12:33:33.147189 4728 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 12:33:34 crc kubenswrapper[4728]: I1205 12:33:34.156782 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nd8g2" event={"ID":"f0dcfaf1-4d00-4149-b16e-482d11735490","Type":"ContainerStarted","Data":"c884cce35838eab14092e74f1a223d204ece223b4951b5cf714f9ac1b8eb7490"} Dec 05 12:33:35 crc kubenswrapper[4728]: I1205 12:33:35.171142 4728 generic.go:334] "Generic (PLEG): container finished" podID="f0dcfaf1-4d00-4149-b16e-482d11735490" containerID="c884cce35838eab14092e74f1a223d204ece223b4951b5cf714f9ac1b8eb7490" exitCode=0 Dec 05 12:33:35 crc kubenswrapper[4728]: I1205 12:33:35.171264 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nd8g2" event={"ID":"f0dcfaf1-4d00-4149-b16e-482d11735490","Type":"ContainerDied","Data":"c884cce35838eab14092e74f1a223d204ece223b4951b5cf714f9ac1b8eb7490"} Dec 05 12:33:36 crc kubenswrapper[4728]: I1205 12:33:36.186016 4728 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nd8g2" event={"ID":"f0dcfaf1-4d00-4149-b16e-482d11735490","Type":"ContainerStarted","Data":"1b30b93e0edbd9aacbb76177982ee03909a2809a39f4dff7e3a55f754ced55a8"} Dec 05 12:33:36 crc kubenswrapper[4728]: I1205 12:33:36.215007 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-nd8g2" podStartSLOduration=2.81606065 podStartE2EDuration="5.214987089s" podCreationTimestamp="2025-12-05 12:33:31 +0000 UTC" firstStartedPulling="2025-12-05 12:33:33.146929746 +0000 UTC m=+5147.289052449" lastFinishedPulling="2025-12-05 12:33:35.545856195 +0000 UTC m=+5149.687978888" observedRunningTime="2025-12-05 12:33:36.207450218 +0000 UTC m=+5150.349572931" watchObservedRunningTime="2025-12-05 12:33:36.214987089 +0000 UTC m=+5150.357109782" Dec 05 12:33:41 crc kubenswrapper[4728]: I1205 12:33:41.460339 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-nd8g2" Dec 05 12:33:41 crc kubenswrapper[4728]: I1205 12:33:41.460838 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-nd8g2" Dec 05 12:33:41 crc kubenswrapper[4728]: I1205 12:33:41.534069 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-nd8g2" Dec 05 12:33:42 crc kubenswrapper[4728]: I1205 12:33:42.307935 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-nd8g2" Dec 05 12:33:42 crc kubenswrapper[4728]: I1205 12:33:42.377824 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-nd8g2"] Dec 05 12:33:44 crc kubenswrapper[4728]: I1205 12:33:44.260422 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-nd8g2" podUID="f0dcfaf1-4d00-4149-b16e-482d11735490" containerName="registry-server" containerID="cri-o://1b30b93e0edbd9aacbb76177982ee03909a2809a39f4dff7e3a55f754ced55a8" gracePeriod=2 Dec 05 12:33:45 crc kubenswrapper[4728]: I1205 12:33:45.271150 4728 generic.go:334] "Generic (PLEG): container finished" podID="f0dcfaf1-4d00-4149-b16e-482d11735490" containerID="1b30b93e0edbd9aacbb76177982ee03909a2809a39f4dff7e3a55f754ced55a8" exitCode=0 Dec 05 12:33:45 crc kubenswrapper[4728]: I1205 12:33:45.271189 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nd8g2" event={"ID":"f0dcfaf1-4d00-4149-b16e-482d11735490","Type":"ContainerDied","Data":"1b30b93e0edbd9aacbb76177982ee03909a2809a39f4dff7e3a55f754ced55a8"} Dec 05 12:33:45 crc kubenswrapper[4728]: I1205 12:33:45.862125 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-nd8g2" Dec 05 12:33:45 crc kubenswrapper[4728]: I1205 12:33:45.982688 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0dcfaf1-4d00-4149-b16e-482d11735490-catalog-content\") pod \"f0dcfaf1-4d00-4149-b16e-482d11735490\" (UID: \"f0dcfaf1-4d00-4149-b16e-482d11735490\") " Dec 05 12:33:45 crc kubenswrapper[4728]: I1205 12:33:45.982821 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0dcfaf1-4d00-4149-b16e-482d11735490-utilities\") pod \"f0dcfaf1-4d00-4149-b16e-482d11735490\" (UID: \"f0dcfaf1-4d00-4149-b16e-482d11735490\") " Dec 05 12:33:45 crc kubenswrapper[4728]: I1205 12:33:45.984101 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f0dcfaf1-4d00-4149-b16e-482d11735490-utilities" (OuterVolumeSpecName: "utilities") pod "f0dcfaf1-4d00-4149-b16e-482d11735490" (UID: "f0dcfaf1-4d00-4149-b16e-482d11735490"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:33:45 crc kubenswrapper[4728]: I1205 12:33:45.987333 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z5mhc\" (UniqueName: \"kubernetes.io/projected/f0dcfaf1-4d00-4149-b16e-482d11735490-kube-api-access-z5mhc\") pod \"f0dcfaf1-4d00-4149-b16e-482d11735490\" (UID: \"f0dcfaf1-4d00-4149-b16e-482d11735490\") " Dec 05 12:33:45 crc kubenswrapper[4728]: I1205 12:33:45.988074 4728 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0dcfaf1-4d00-4149-b16e-482d11735490-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:46 crc kubenswrapper[4728]: I1205 12:33:46.011320 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f0dcfaf1-4d00-4149-b16e-482d11735490-kube-api-access-z5mhc" (OuterVolumeSpecName: "kube-api-access-z5mhc") pod "f0dcfaf1-4d00-4149-b16e-482d11735490" (UID: "f0dcfaf1-4d00-4149-b16e-482d11735490"). InnerVolumeSpecName "kube-api-access-z5mhc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:33:46 crc kubenswrapper[4728]: I1205 12:33:46.030288 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f0dcfaf1-4d00-4149-b16e-482d11735490-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f0dcfaf1-4d00-4149-b16e-482d11735490" (UID: "f0dcfaf1-4d00-4149-b16e-482d11735490"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:33:46 crc kubenswrapper[4728]: I1205 12:33:46.090758 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z5mhc\" (UniqueName: \"kubernetes.io/projected/f0dcfaf1-4d00-4149-b16e-482d11735490-kube-api-access-z5mhc\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:46 crc kubenswrapper[4728]: I1205 12:33:46.090871 4728 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0dcfaf1-4d00-4149-b16e-482d11735490-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 12:33:46 crc kubenswrapper[4728]: I1205 12:33:46.283453 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nd8g2" event={"ID":"f0dcfaf1-4d00-4149-b16e-482d11735490","Type":"ContainerDied","Data":"e1a012430d8140c4cb8bd276425ef710fd4d3f8bbc40f4d876f140efe30639d3"} Dec 05 12:33:46 crc kubenswrapper[4728]: I1205 12:33:46.284252 4728 scope.go:117] "RemoveContainer" containerID="1b30b93e0edbd9aacbb76177982ee03909a2809a39f4dff7e3a55f754ced55a8" Dec 05 12:33:46 crc kubenswrapper[4728]: I1205 12:33:46.283568 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nd8g2" Dec 05 12:33:46 crc kubenswrapper[4728]: I1205 12:33:46.305420 4728 scope.go:117] "RemoveContainer" containerID="c884cce35838eab14092e74f1a223d204ece223b4951b5cf714f9ac1b8eb7490" Dec 05 12:33:46 crc kubenswrapper[4728]: I1205 12:33:46.331709 4728 scope.go:117] "RemoveContainer" containerID="c456cb3b152ea0799b268860d7251d037c5683776de9ad8962c6778c0ec00b16" Dec 05 12:33:46 crc kubenswrapper[4728]: I1205 12:33:46.332354 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-nd8g2"] Dec 05 12:33:46 crc kubenswrapper[4728]: I1205 12:33:46.344753 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-nd8g2"] Dec 05 12:33:46 crc kubenswrapper[4728]: I1205 12:33:46.363165 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f0dcfaf1-4d00-4149-b16e-482d11735490" path="/var/lib/kubelet/pods/f0dcfaf1-4d00-4149-b16e-482d11735490/volumes" Dec 05 12:34:25 crc kubenswrapper[4728]: I1205 12:34:25.701692 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:34:25 crc kubenswrapper[4728]: I1205 12:34:25.702289 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:34:42 crc kubenswrapper[4728]: I1205 12:34:42.313438 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-2445b"] Dec 05 12:34:42 crc kubenswrapper[4728]: E1205 12:34:42.314292 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0dcfaf1-4d00-4149-b16e-482d11735490" containerName="registry-server" Dec 05 12:34:42 crc kubenswrapper[4728]: I1205 12:34:42.314303 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0dcfaf1-4d00-4149-b16e-482d11735490" 
containerName="registry-server" Dec 05 12:34:42 crc kubenswrapper[4728]: E1205 12:34:42.314337 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0dcfaf1-4d00-4149-b16e-482d11735490" containerName="extract-content" Dec 05 12:34:42 crc kubenswrapper[4728]: I1205 12:34:42.314343 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0dcfaf1-4d00-4149-b16e-482d11735490" containerName="extract-content" Dec 05 12:34:42 crc kubenswrapper[4728]: E1205 12:34:42.314367 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0dcfaf1-4d00-4149-b16e-482d11735490" containerName="extract-utilities" Dec 05 12:34:42 crc kubenswrapper[4728]: I1205 12:34:42.314373 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0dcfaf1-4d00-4149-b16e-482d11735490" containerName="extract-utilities" Dec 05 12:34:42 crc kubenswrapper[4728]: I1205 12:34:42.314587 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="f0dcfaf1-4d00-4149-b16e-482d11735490" containerName="registry-server" Dec 05 12:34:42 crc kubenswrapper[4728]: I1205 12:34:42.316051 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2445b" Dec 05 12:34:42 crc kubenswrapper[4728]: I1205 12:34:42.330869 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2445b"] Dec 05 12:34:42 crc kubenswrapper[4728]: I1205 12:34:42.371225 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3723b525-77b0-4cea-93cb-9e00d44951f8-catalog-content\") pod \"community-operators-2445b\" (UID: \"3723b525-77b0-4cea-93cb-9e00d44951f8\") " pod="openshift-marketplace/community-operators-2445b" Dec 05 12:34:42 crc kubenswrapper[4728]: I1205 12:34:42.371363 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8psq5\" (UniqueName: \"kubernetes.io/projected/3723b525-77b0-4cea-93cb-9e00d44951f8-kube-api-access-8psq5\") pod \"community-operators-2445b\" (UID: \"3723b525-77b0-4cea-93cb-9e00d44951f8\") " pod="openshift-marketplace/community-operators-2445b" Dec 05 12:34:42 crc kubenswrapper[4728]: I1205 12:34:42.371515 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3723b525-77b0-4cea-93cb-9e00d44951f8-utilities\") pod \"community-operators-2445b\" (UID: \"3723b525-77b0-4cea-93cb-9e00d44951f8\") " pod="openshift-marketplace/community-operators-2445b" Dec 05 12:34:42 crc kubenswrapper[4728]: I1205 12:34:42.473611 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3723b525-77b0-4cea-93cb-9e00d44951f8-utilities\") pod \"community-operators-2445b\" (UID: \"3723b525-77b0-4cea-93cb-9e00d44951f8\") " pod="openshift-marketplace/community-operators-2445b" Dec 05 12:34:42 crc kubenswrapper[4728]: I1205 12:34:42.473840 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3723b525-77b0-4cea-93cb-9e00d44951f8-catalog-content\") pod \"community-operators-2445b\" (UID: \"3723b525-77b0-4cea-93cb-9e00d44951f8\") " pod="openshift-marketplace/community-operators-2445b" Dec 05 12:34:42 crc kubenswrapper[4728]: I1205 12:34:42.473903 4728 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"kube-api-access-8psq5\" (UniqueName: \"kubernetes.io/projected/3723b525-77b0-4cea-93cb-9e00d44951f8-kube-api-access-8psq5\") pod \"community-operators-2445b\" (UID: \"3723b525-77b0-4cea-93cb-9e00d44951f8\") " pod="openshift-marketplace/community-operators-2445b" Dec 05 12:34:42 crc kubenswrapper[4728]: I1205 12:34:42.474168 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3723b525-77b0-4cea-93cb-9e00d44951f8-utilities\") pod \"community-operators-2445b\" (UID: \"3723b525-77b0-4cea-93cb-9e00d44951f8\") " pod="openshift-marketplace/community-operators-2445b" Dec 05 12:34:42 crc kubenswrapper[4728]: I1205 12:34:42.474452 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3723b525-77b0-4cea-93cb-9e00d44951f8-catalog-content\") pod \"community-operators-2445b\" (UID: \"3723b525-77b0-4cea-93cb-9e00d44951f8\") " pod="openshift-marketplace/community-operators-2445b" Dec 05 12:34:42 crc kubenswrapper[4728]: I1205 12:34:42.492402 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8psq5\" (UniqueName: \"kubernetes.io/projected/3723b525-77b0-4cea-93cb-9e00d44951f8-kube-api-access-8psq5\") pod \"community-operators-2445b\" (UID: \"3723b525-77b0-4cea-93cb-9e00d44951f8\") " pod="openshift-marketplace/community-operators-2445b" Dec 05 12:34:42 crc kubenswrapper[4728]: I1205 12:34:42.692511 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2445b" Dec 05 12:34:43 crc kubenswrapper[4728]: I1205 12:34:43.188889 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2445b"] Dec 05 12:34:43 crc kubenswrapper[4728]: I1205 12:34:43.834129 4728 generic.go:334] "Generic (PLEG): container finished" podID="3723b525-77b0-4cea-93cb-9e00d44951f8" containerID="da848b57591fda5bb7f964b2ef6b150dc2d8e284634b097fcfc38369a50fccb8" exitCode=0 Dec 05 12:34:43 crc kubenswrapper[4728]: I1205 12:34:43.834271 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2445b" event={"ID":"3723b525-77b0-4cea-93cb-9e00d44951f8","Type":"ContainerDied","Data":"da848b57591fda5bb7f964b2ef6b150dc2d8e284634b097fcfc38369a50fccb8"} Dec 05 12:34:43 crc kubenswrapper[4728]: I1205 12:34:43.834417 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2445b" event={"ID":"3723b525-77b0-4cea-93cb-9e00d44951f8","Type":"ContainerStarted","Data":"4922e846430a78ad95bd18e74259ca5b4f8d290a29edeb682a2c049db3540ed5"} Dec 05 12:34:44 crc kubenswrapper[4728]: I1205 12:34:44.845850 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2445b" event={"ID":"3723b525-77b0-4cea-93cb-9e00d44951f8","Type":"ContainerStarted","Data":"37d55875802225b3402e243b8836d19a8aabd1fc979d42e2e9ce1e78bbed8f45"} Dec 05 12:34:45 crc kubenswrapper[4728]: I1205 12:34:45.859493 4728 generic.go:334] "Generic (PLEG): container finished" podID="3723b525-77b0-4cea-93cb-9e00d44951f8" containerID="37d55875802225b3402e243b8836d19a8aabd1fc979d42e2e9ce1e78bbed8f45" exitCode=0 Dec 05 12:34:45 crc kubenswrapper[4728]: I1205 12:34:45.859602 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2445b" 
event={"ID":"3723b525-77b0-4cea-93cb-9e00d44951f8","Type":"ContainerDied","Data":"37d55875802225b3402e243b8836d19a8aabd1fc979d42e2e9ce1e78bbed8f45"} Dec 05 12:34:46 crc kubenswrapper[4728]: I1205 12:34:46.872912 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2445b" event={"ID":"3723b525-77b0-4cea-93cb-9e00d44951f8","Type":"ContainerStarted","Data":"4912b6bc5c2f551178b2cdde525cde9ace940c18a6ed06e090d003989a857b12"} Dec 05 12:34:46 crc kubenswrapper[4728]: I1205 12:34:46.896199 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-2445b" podStartSLOduration=2.461088427 podStartE2EDuration="4.896178774s" podCreationTimestamp="2025-12-05 12:34:42 +0000 UTC" firstStartedPulling="2025-12-05 12:34:43.837472239 +0000 UTC m=+5217.979594932" lastFinishedPulling="2025-12-05 12:34:46.272562596 +0000 UTC m=+5220.414685279" observedRunningTime="2025-12-05 12:34:46.889748232 +0000 UTC m=+5221.031870925" watchObservedRunningTime="2025-12-05 12:34:46.896178774 +0000 UTC m=+5221.038301487" Dec 05 12:34:52 crc kubenswrapper[4728]: I1205 12:34:52.693272 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-2445b" Dec 05 12:34:52 crc kubenswrapper[4728]: I1205 12:34:52.696313 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-2445b" Dec 05 12:34:52 crc kubenswrapper[4728]: I1205 12:34:52.753842 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-2445b" Dec 05 12:34:52 crc kubenswrapper[4728]: I1205 12:34:52.985427 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-2445b" Dec 05 12:34:53 crc kubenswrapper[4728]: I1205 12:34:53.034832 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2445b"] Dec 05 12:34:54 crc kubenswrapper[4728]: I1205 12:34:54.949819 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-2445b" podUID="3723b525-77b0-4cea-93cb-9e00d44951f8" containerName="registry-server" containerID="cri-o://4912b6bc5c2f551178b2cdde525cde9ace940c18a6ed06e090d003989a857b12" gracePeriod=2 Dec 05 12:34:55 crc kubenswrapper[4728]: I1205 12:34:55.701357 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:34:55 crc kubenswrapper[4728]: I1205 12:34:55.701415 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:34:55 crc kubenswrapper[4728]: I1205 12:34:55.950553 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-2445b" Dec 05 12:34:55 crc kubenswrapper[4728]: I1205 12:34:55.959300 4728 generic.go:334] "Generic (PLEG): container finished" podID="3723b525-77b0-4cea-93cb-9e00d44951f8" containerID="4912b6bc5c2f551178b2cdde525cde9ace940c18a6ed06e090d003989a857b12" exitCode=0 Dec 05 12:34:55 crc kubenswrapper[4728]: I1205 12:34:55.959371 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2445b" event={"ID":"3723b525-77b0-4cea-93cb-9e00d44951f8","Type":"ContainerDied","Data":"4912b6bc5c2f551178b2cdde525cde9ace940c18a6ed06e090d003989a857b12"} Dec 05 12:34:55 crc kubenswrapper[4728]: I1205 12:34:55.959403 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2445b" event={"ID":"3723b525-77b0-4cea-93cb-9e00d44951f8","Type":"ContainerDied","Data":"4922e846430a78ad95bd18e74259ca5b4f8d290a29edeb682a2c049db3540ed5"} Dec 05 12:34:55 crc kubenswrapper[4728]: I1205 12:34:55.959423 4728 scope.go:117] "RemoveContainer" containerID="4912b6bc5c2f551178b2cdde525cde9ace940c18a6ed06e090d003989a857b12" Dec 05 12:34:55 crc kubenswrapper[4728]: I1205 12:34:55.959572 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2445b" Dec 05 12:34:55 crc kubenswrapper[4728]: I1205 12:34:55.962411 4728 generic.go:334] "Generic (PLEG): container finished" podID="6d145479-9865-48c2-9c3a-34b5937af539" containerID="ba274a925c79d5d6ed1356b3fd856199ad16aafc86dac9ba6b6cad4916e99b89" exitCode=0 Dec 05 12:34:55 crc kubenswrapper[4728]: I1205 12:34:55.962444 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-m2zwd/must-gather-mjtfz" event={"ID":"6d145479-9865-48c2-9c3a-34b5937af539","Type":"ContainerDied","Data":"ba274a925c79d5d6ed1356b3fd856199ad16aafc86dac9ba6b6cad4916e99b89"} Dec 05 12:34:55 crc kubenswrapper[4728]: I1205 12:34:55.963050 4728 scope.go:117] "RemoveContainer" containerID="ba274a925c79d5d6ed1356b3fd856199ad16aafc86dac9ba6b6cad4916e99b89" Dec 05 12:34:55 crc kubenswrapper[4728]: I1205 12:34:55.981970 4728 scope.go:117] "RemoveContainer" containerID="37d55875802225b3402e243b8836d19a8aabd1fc979d42e2e9ce1e78bbed8f45" Dec 05 12:34:56 crc kubenswrapper[4728]: I1205 12:34:56.011681 4728 scope.go:117] "RemoveContainer" containerID="da848b57591fda5bb7f964b2ef6b150dc2d8e284634b097fcfc38369a50fccb8" Dec 05 12:34:56 crc kubenswrapper[4728]: I1205 12:34:56.050229 4728 scope.go:117] "RemoveContainer" containerID="4912b6bc5c2f551178b2cdde525cde9ace940c18a6ed06e090d003989a857b12" Dec 05 12:34:56 crc kubenswrapper[4728]: E1205 12:34:56.050688 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4912b6bc5c2f551178b2cdde525cde9ace940c18a6ed06e090d003989a857b12\": container with ID starting with 4912b6bc5c2f551178b2cdde525cde9ace940c18a6ed06e090d003989a857b12 not found: ID does not exist" containerID="4912b6bc5c2f551178b2cdde525cde9ace940c18a6ed06e090d003989a857b12" Dec 05 12:34:56 crc kubenswrapper[4728]: I1205 12:34:56.050718 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4912b6bc5c2f551178b2cdde525cde9ace940c18a6ed06e090d003989a857b12"} err="failed to get container status \"4912b6bc5c2f551178b2cdde525cde9ace940c18a6ed06e090d003989a857b12\": rpc error: code = NotFound desc = could not find container 
\"4912b6bc5c2f551178b2cdde525cde9ace940c18a6ed06e090d003989a857b12\": container with ID starting with 4912b6bc5c2f551178b2cdde525cde9ace940c18a6ed06e090d003989a857b12 not found: ID does not exist" Dec 05 12:34:56 crc kubenswrapper[4728]: I1205 12:34:56.050740 4728 scope.go:117] "RemoveContainer" containerID="37d55875802225b3402e243b8836d19a8aabd1fc979d42e2e9ce1e78bbed8f45" Dec 05 12:34:56 crc kubenswrapper[4728]: E1205 12:34:56.051083 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"37d55875802225b3402e243b8836d19a8aabd1fc979d42e2e9ce1e78bbed8f45\": container with ID starting with 37d55875802225b3402e243b8836d19a8aabd1fc979d42e2e9ce1e78bbed8f45 not found: ID does not exist" containerID="37d55875802225b3402e243b8836d19a8aabd1fc979d42e2e9ce1e78bbed8f45" Dec 05 12:34:56 crc kubenswrapper[4728]: I1205 12:34:56.051192 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"37d55875802225b3402e243b8836d19a8aabd1fc979d42e2e9ce1e78bbed8f45"} err="failed to get container status \"37d55875802225b3402e243b8836d19a8aabd1fc979d42e2e9ce1e78bbed8f45\": rpc error: code = NotFound desc = could not find container \"37d55875802225b3402e243b8836d19a8aabd1fc979d42e2e9ce1e78bbed8f45\": container with ID starting with 37d55875802225b3402e243b8836d19a8aabd1fc979d42e2e9ce1e78bbed8f45 not found: ID does not exist" Dec 05 12:34:56 crc kubenswrapper[4728]: I1205 12:34:56.051307 4728 scope.go:117] "RemoveContainer" containerID="da848b57591fda5bb7f964b2ef6b150dc2d8e284634b097fcfc38369a50fccb8" Dec 05 12:34:56 crc kubenswrapper[4728]: E1205 12:34:56.051711 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"da848b57591fda5bb7f964b2ef6b150dc2d8e284634b097fcfc38369a50fccb8\": container with ID starting with da848b57591fda5bb7f964b2ef6b150dc2d8e284634b097fcfc38369a50fccb8 not found: ID does not exist" containerID="da848b57591fda5bb7f964b2ef6b150dc2d8e284634b097fcfc38369a50fccb8" Dec 05 12:34:56 crc kubenswrapper[4728]: I1205 12:34:56.051732 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da848b57591fda5bb7f964b2ef6b150dc2d8e284634b097fcfc38369a50fccb8"} err="failed to get container status \"da848b57591fda5bb7f964b2ef6b150dc2d8e284634b097fcfc38369a50fccb8\": rpc error: code = NotFound desc = could not find container \"da848b57591fda5bb7f964b2ef6b150dc2d8e284634b097fcfc38369a50fccb8\": container with ID starting with da848b57591fda5bb7f964b2ef6b150dc2d8e284634b097fcfc38369a50fccb8 not found: ID does not exist" Dec 05 12:34:56 crc kubenswrapper[4728]: I1205 12:34:56.094131 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3723b525-77b0-4cea-93cb-9e00d44951f8-utilities\") pod \"3723b525-77b0-4cea-93cb-9e00d44951f8\" (UID: \"3723b525-77b0-4cea-93cb-9e00d44951f8\") " Dec 05 12:34:56 crc kubenswrapper[4728]: I1205 12:34:56.094547 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8psq5\" (UniqueName: \"kubernetes.io/projected/3723b525-77b0-4cea-93cb-9e00d44951f8-kube-api-access-8psq5\") pod \"3723b525-77b0-4cea-93cb-9e00d44951f8\" (UID: \"3723b525-77b0-4cea-93cb-9e00d44951f8\") " Dec 05 12:34:56 crc kubenswrapper[4728]: I1205 12:34:56.094676 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" 
(UniqueName: \"kubernetes.io/empty-dir/3723b525-77b0-4cea-93cb-9e00d44951f8-catalog-content\") pod \"3723b525-77b0-4cea-93cb-9e00d44951f8\" (UID: \"3723b525-77b0-4cea-93cb-9e00d44951f8\") " Dec 05 12:34:56 crc kubenswrapper[4728]: I1205 12:34:56.095665 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3723b525-77b0-4cea-93cb-9e00d44951f8-utilities" (OuterVolumeSpecName: "utilities") pod "3723b525-77b0-4cea-93cb-9e00d44951f8" (UID: "3723b525-77b0-4cea-93cb-9e00d44951f8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:34:56 crc kubenswrapper[4728]: I1205 12:34:56.096699 4728 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3723b525-77b0-4cea-93cb-9e00d44951f8-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 12:34:56 crc kubenswrapper[4728]: I1205 12:34:56.101335 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3723b525-77b0-4cea-93cb-9e00d44951f8-kube-api-access-8psq5" (OuterVolumeSpecName: "kube-api-access-8psq5") pod "3723b525-77b0-4cea-93cb-9e00d44951f8" (UID: "3723b525-77b0-4cea-93cb-9e00d44951f8"). InnerVolumeSpecName "kube-api-access-8psq5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:34:56 crc kubenswrapper[4728]: I1205 12:34:56.148694 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3723b525-77b0-4cea-93cb-9e00d44951f8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3723b525-77b0-4cea-93cb-9e00d44951f8" (UID: "3723b525-77b0-4cea-93cb-9e00d44951f8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:34:56 crc kubenswrapper[4728]: I1205 12:34:56.198725 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8psq5\" (UniqueName: \"kubernetes.io/projected/3723b525-77b0-4cea-93cb-9e00d44951f8-kube-api-access-8psq5\") on node \"crc\" DevicePath \"\"" Dec 05 12:34:56 crc kubenswrapper[4728]: I1205 12:34:56.198767 4728 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3723b525-77b0-4cea-93cb-9e00d44951f8-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 12:34:56 crc kubenswrapper[4728]: I1205 12:34:56.293770 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2445b"] Dec 05 12:34:56 crc kubenswrapper[4728]: I1205 12:34:56.304588 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-2445b"] Dec 05 12:34:56 crc kubenswrapper[4728]: I1205 12:34:56.363196 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3723b525-77b0-4cea-93cb-9e00d44951f8" path="/var/lib/kubelet/pods/3723b525-77b0-4cea-93cb-9e00d44951f8/volumes" Dec 05 12:34:56 crc kubenswrapper[4728]: I1205 12:34:56.575857 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-m2zwd_must-gather-mjtfz_6d145479-9865-48c2-9c3a-34b5937af539/gather/0.log" Dec 05 12:35:05 crc kubenswrapper[4728]: I1205 12:35:05.585837 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-m2zwd/must-gather-mjtfz"] Dec 05 12:35:05 crc kubenswrapper[4728]: I1205 12:35:05.586436 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-m2zwd/must-gather-mjtfz" 
podUID="6d145479-9865-48c2-9c3a-34b5937af539" containerName="copy" containerID="cri-o://661ec5d4218aed279ff9c86e3625c61bb58d25ccb574c594676d121467e4c339" gracePeriod=2 Dec 05 12:35:05 crc kubenswrapper[4728]: I1205 12:35:05.596041 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-m2zwd/must-gather-mjtfz"] Dec 05 12:35:05 crc kubenswrapper[4728]: I1205 12:35:05.985995 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-m2zwd_must-gather-mjtfz_6d145479-9865-48c2-9c3a-34b5937af539/copy/0.log" Dec 05 12:35:05 crc kubenswrapper[4728]: I1205 12:35:05.987023 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-m2zwd/must-gather-mjtfz" Dec 05 12:35:06 crc kubenswrapper[4728]: I1205 12:35:06.054588 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-m2zwd_must-gather-mjtfz_6d145479-9865-48c2-9c3a-34b5937af539/copy/0.log" Dec 05 12:35:06 crc kubenswrapper[4728]: I1205 12:35:06.055094 4728 generic.go:334] "Generic (PLEG): container finished" podID="6d145479-9865-48c2-9c3a-34b5937af539" containerID="661ec5d4218aed279ff9c86e3625c61bb58d25ccb574c594676d121467e4c339" exitCode=143 Dec 05 12:35:06 crc kubenswrapper[4728]: I1205 12:35:06.055178 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-m2zwd/must-gather-mjtfz" Dec 05 12:35:06 crc kubenswrapper[4728]: I1205 12:35:06.055210 4728 scope.go:117] "RemoveContainer" containerID="661ec5d4218aed279ff9c86e3625c61bb58d25ccb574c594676d121467e4c339" Dec 05 12:35:06 crc kubenswrapper[4728]: I1205 12:35:06.076889 4728 scope.go:117] "RemoveContainer" containerID="ba274a925c79d5d6ed1356b3fd856199ad16aafc86dac9ba6b6cad4916e99b89" Dec 05 12:35:06 crc kubenswrapper[4728]: I1205 12:35:06.096842 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnbfm\" (UniqueName: \"kubernetes.io/projected/6d145479-9865-48c2-9c3a-34b5937af539-kube-api-access-rnbfm\") pod \"6d145479-9865-48c2-9c3a-34b5937af539\" (UID: \"6d145479-9865-48c2-9c3a-34b5937af539\") " Dec 05 12:35:06 crc kubenswrapper[4728]: I1205 12:35:06.096965 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/6d145479-9865-48c2-9c3a-34b5937af539-must-gather-output\") pod \"6d145479-9865-48c2-9c3a-34b5937af539\" (UID: \"6d145479-9865-48c2-9c3a-34b5937af539\") " Dec 05 12:35:06 crc kubenswrapper[4728]: I1205 12:35:06.104111 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6d145479-9865-48c2-9c3a-34b5937af539-kube-api-access-rnbfm" (OuterVolumeSpecName: "kube-api-access-rnbfm") pod "6d145479-9865-48c2-9c3a-34b5937af539" (UID: "6d145479-9865-48c2-9c3a-34b5937af539"). InnerVolumeSpecName "kube-api-access-rnbfm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:35:06 crc kubenswrapper[4728]: I1205 12:35:06.192805 4728 scope.go:117] "RemoveContainer" containerID="661ec5d4218aed279ff9c86e3625c61bb58d25ccb574c594676d121467e4c339" Dec 05 12:35:06 crc kubenswrapper[4728]: E1205 12:35:06.193231 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"661ec5d4218aed279ff9c86e3625c61bb58d25ccb574c594676d121467e4c339\": container with ID starting with 661ec5d4218aed279ff9c86e3625c61bb58d25ccb574c594676d121467e4c339 not found: ID does not exist" containerID="661ec5d4218aed279ff9c86e3625c61bb58d25ccb574c594676d121467e4c339" Dec 05 12:35:06 crc kubenswrapper[4728]: I1205 12:35:06.193277 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"661ec5d4218aed279ff9c86e3625c61bb58d25ccb574c594676d121467e4c339"} err="failed to get container status \"661ec5d4218aed279ff9c86e3625c61bb58d25ccb574c594676d121467e4c339\": rpc error: code = NotFound desc = could not find container \"661ec5d4218aed279ff9c86e3625c61bb58d25ccb574c594676d121467e4c339\": container with ID starting with 661ec5d4218aed279ff9c86e3625c61bb58d25ccb574c594676d121467e4c339 not found: ID does not exist" Dec 05 12:35:06 crc kubenswrapper[4728]: I1205 12:35:06.193302 4728 scope.go:117] "RemoveContainer" containerID="ba274a925c79d5d6ed1356b3fd856199ad16aafc86dac9ba6b6cad4916e99b89" Dec 05 12:35:06 crc kubenswrapper[4728]: E1205 12:35:06.193523 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ba274a925c79d5d6ed1356b3fd856199ad16aafc86dac9ba6b6cad4916e99b89\": container with ID starting with ba274a925c79d5d6ed1356b3fd856199ad16aafc86dac9ba6b6cad4916e99b89 not found: ID does not exist" containerID="ba274a925c79d5d6ed1356b3fd856199ad16aafc86dac9ba6b6cad4916e99b89" Dec 05 12:35:06 crc kubenswrapper[4728]: I1205 12:35:06.193557 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba274a925c79d5d6ed1356b3fd856199ad16aafc86dac9ba6b6cad4916e99b89"} err="failed to get container status \"ba274a925c79d5d6ed1356b3fd856199ad16aafc86dac9ba6b6cad4916e99b89\": rpc error: code = NotFound desc = could not find container \"ba274a925c79d5d6ed1356b3fd856199ad16aafc86dac9ba6b6cad4916e99b89\": container with ID starting with ba274a925c79d5d6ed1356b3fd856199ad16aafc86dac9ba6b6cad4916e99b89 not found: ID does not exist" Dec 05 12:35:06 crc kubenswrapper[4728]: I1205 12:35:06.199070 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnbfm\" (UniqueName: \"kubernetes.io/projected/6d145479-9865-48c2-9c3a-34b5937af539-kube-api-access-rnbfm\") on node \"crc\" DevicePath \"\"" Dec 05 12:35:06 crc kubenswrapper[4728]: I1205 12:35:06.323564 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6d145479-9865-48c2-9c3a-34b5937af539-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "6d145479-9865-48c2-9c3a-34b5937af539" (UID: "6d145479-9865-48c2-9c3a-34b5937af539"). InnerVolumeSpecName "must-gather-output". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:35:06 crc kubenswrapper[4728]: I1205 12:35:06.368098 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6d145479-9865-48c2-9c3a-34b5937af539" path="/var/lib/kubelet/pods/6d145479-9865-48c2-9c3a-34b5937af539/volumes" Dec 05 12:35:06 crc kubenswrapper[4728]: I1205 12:35:06.402555 4728 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/6d145479-9865-48c2-9c3a-34b5937af539-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 05 12:35:25 crc kubenswrapper[4728]: I1205 12:35:25.702197 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:35:25 crc kubenswrapper[4728]: I1205 12:35:25.702741 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:35:25 crc kubenswrapper[4728]: I1205 12:35:25.702814 4728 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" Dec 05 12:35:25 crc kubenswrapper[4728]: I1205 12:35:25.703512 4728 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ae8200e36b953d8a7b1f348f59b89f13d9c5c73ea3665ac570f46d3131918f69"} pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 12:35:25 crc kubenswrapper[4728]: I1205 12:35:25.703599 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" containerID="cri-o://ae8200e36b953d8a7b1f348f59b89f13d9c5c73ea3665ac570f46d3131918f69" gracePeriod=600 Dec 05 12:35:25 crc kubenswrapper[4728]: E1205 12:35:25.828021 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:35:26 crc kubenswrapper[4728]: I1205 12:35:26.244970 4728 generic.go:334] "Generic (PLEG): container finished" podID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerID="ae8200e36b953d8a7b1f348f59b89f13d9c5c73ea3665ac570f46d3131918f69" exitCode=0 Dec 05 12:35:26 crc kubenswrapper[4728]: I1205 12:35:26.245015 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" event={"ID":"95bfa60b-fcb6-4519-abc5-c25fea50921d","Type":"ContainerDied","Data":"ae8200e36b953d8a7b1f348f59b89f13d9c5c73ea3665ac570f46d3131918f69"} Dec 05 12:35:26 crc kubenswrapper[4728]: I1205 12:35:26.245415 4728 scope.go:117] "RemoveContainer" 
containerID="a0bc77d6c0813c6450a27dc8cefc7322664efcc392c3072ff03b382186801ae2" Dec 05 12:35:26 crc kubenswrapper[4728]: I1205 12:35:26.247222 4728 scope.go:117] "RemoveContainer" containerID="ae8200e36b953d8a7b1f348f59b89f13d9c5c73ea3665ac570f46d3131918f69" Dec 05 12:35:26 crc kubenswrapper[4728]: E1205 12:35:26.248828 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:35:37 crc kubenswrapper[4728]: I1205 12:35:37.359518 4728 scope.go:117] "RemoveContainer" containerID="ae8200e36b953d8a7b1f348f59b89f13d9c5c73ea3665ac570f46d3131918f69" Dec 05 12:35:37 crc kubenswrapper[4728]: E1205 12:35:37.361058 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:35:52 crc kubenswrapper[4728]: I1205 12:35:52.352127 4728 scope.go:117] "RemoveContainer" containerID="ae8200e36b953d8a7b1f348f59b89f13d9c5c73ea3665ac570f46d3131918f69" Dec 05 12:35:52 crc kubenswrapper[4728]: E1205 12:35:52.352902 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:36:02 crc kubenswrapper[4728]: I1205 12:36:02.844367 4728 scope.go:117] "RemoveContainer" containerID="f3cbb1b6c6ca6afeb7a67f350fbe0ced16e7a71252b75a21a6e8ea9854543346" Dec 05 12:36:02 crc kubenswrapper[4728]: I1205 12:36:02.866710 4728 scope.go:117] "RemoveContainer" containerID="0567d6c7950c6478d5e5593e27ebd874eeb2092f51f55ccf2847d2157b6ed4b6" Dec 05 12:36:03 crc kubenswrapper[4728]: I1205 12:36:03.352037 4728 scope.go:117] "RemoveContainer" containerID="ae8200e36b953d8a7b1f348f59b89f13d9c5c73ea3665ac570f46d3131918f69" Dec 05 12:36:03 crc kubenswrapper[4728]: E1205 12:36:03.352860 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:36:15 crc kubenswrapper[4728]: I1205 12:36:15.353486 4728 scope.go:117] "RemoveContainer" containerID="ae8200e36b953d8a7b1f348f59b89f13d9c5c73ea3665ac570f46d3131918f69" Dec 05 12:36:15 crc kubenswrapper[4728]: E1205 12:36:15.354908 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:36:30 crc kubenswrapper[4728]: I1205 12:36:30.159131 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-hrdql"] Dec 05 12:36:30 crc kubenswrapper[4728]: E1205 12:36:30.159990 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d145479-9865-48c2-9c3a-34b5937af539" containerName="copy" Dec 05 12:36:30 crc kubenswrapper[4728]: I1205 12:36:30.160003 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d145479-9865-48c2-9c3a-34b5937af539" containerName="copy" Dec 05 12:36:30 crc kubenswrapper[4728]: E1205 12:36:30.160014 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3723b525-77b0-4cea-93cb-9e00d44951f8" containerName="extract-utilities" Dec 05 12:36:30 crc kubenswrapper[4728]: I1205 12:36:30.160020 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="3723b525-77b0-4cea-93cb-9e00d44951f8" containerName="extract-utilities" Dec 05 12:36:30 crc kubenswrapper[4728]: E1205 12:36:30.160043 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d145479-9865-48c2-9c3a-34b5937af539" containerName="gather" Dec 05 12:36:30 crc kubenswrapper[4728]: I1205 12:36:30.160050 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d145479-9865-48c2-9c3a-34b5937af539" containerName="gather" Dec 05 12:36:30 crc kubenswrapper[4728]: E1205 12:36:30.160067 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3723b525-77b0-4cea-93cb-9e00d44951f8" containerName="extract-content" Dec 05 12:36:30 crc kubenswrapper[4728]: I1205 12:36:30.160073 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="3723b525-77b0-4cea-93cb-9e00d44951f8" containerName="extract-content" Dec 05 12:36:30 crc kubenswrapper[4728]: E1205 12:36:30.160082 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3723b525-77b0-4cea-93cb-9e00d44951f8" containerName="registry-server" Dec 05 12:36:30 crc kubenswrapper[4728]: I1205 12:36:30.160088 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="3723b525-77b0-4cea-93cb-9e00d44951f8" containerName="registry-server" Dec 05 12:36:30 crc kubenswrapper[4728]: I1205 12:36:30.160280 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d145479-9865-48c2-9c3a-34b5937af539" containerName="gather" Dec 05 12:36:30 crc kubenswrapper[4728]: I1205 12:36:30.160315 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d145479-9865-48c2-9c3a-34b5937af539" containerName="copy" Dec 05 12:36:30 crc kubenswrapper[4728]: I1205 12:36:30.160327 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="3723b525-77b0-4cea-93cb-9e00d44951f8" containerName="registry-server" Dec 05 12:36:30 crc kubenswrapper[4728]: I1205 12:36:30.161705 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hrdql" Dec 05 12:36:30 crc kubenswrapper[4728]: I1205 12:36:30.182000 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-hrdql"] Dec 05 12:36:30 crc kubenswrapper[4728]: I1205 12:36:30.270134 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/01c5db12-a423-4844-8303-464b328772c2-utilities\") pod \"redhat-marketplace-hrdql\" (UID: \"01c5db12-a423-4844-8303-464b328772c2\") " pod="openshift-marketplace/redhat-marketplace-hrdql" Dec 05 12:36:30 crc kubenswrapper[4728]: I1205 12:36:30.270286 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bz969\" (UniqueName: \"kubernetes.io/projected/01c5db12-a423-4844-8303-464b328772c2-kube-api-access-bz969\") pod \"redhat-marketplace-hrdql\" (UID: \"01c5db12-a423-4844-8303-464b328772c2\") " pod="openshift-marketplace/redhat-marketplace-hrdql" Dec 05 12:36:30 crc kubenswrapper[4728]: I1205 12:36:30.270329 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/01c5db12-a423-4844-8303-464b328772c2-catalog-content\") pod \"redhat-marketplace-hrdql\" (UID: \"01c5db12-a423-4844-8303-464b328772c2\") " pod="openshift-marketplace/redhat-marketplace-hrdql" Dec 05 12:36:30 crc kubenswrapper[4728]: I1205 12:36:30.352089 4728 scope.go:117] "RemoveContainer" containerID="ae8200e36b953d8a7b1f348f59b89f13d9c5c73ea3665ac570f46d3131918f69" Dec 05 12:36:30 crc kubenswrapper[4728]: E1205 12:36:30.352368 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:36:30 crc kubenswrapper[4728]: I1205 12:36:30.372121 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bz969\" (UniqueName: \"kubernetes.io/projected/01c5db12-a423-4844-8303-464b328772c2-kube-api-access-bz969\") pod \"redhat-marketplace-hrdql\" (UID: \"01c5db12-a423-4844-8303-464b328772c2\") " pod="openshift-marketplace/redhat-marketplace-hrdql" Dec 05 12:36:30 crc kubenswrapper[4728]: I1205 12:36:30.372189 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/01c5db12-a423-4844-8303-464b328772c2-catalog-content\") pod \"redhat-marketplace-hrdql\" (UID: \"01c5db12-a423-4844-8303-464b328772c2\") " pod="openshift-marketplace/redhat-marketplace-hrdql" Dec 05 12:36:30 crc kubenswrapper[4728]: I1205 12:36:30.372304 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/01c5db12-a423-4844-8303-464b328772c2-utilities\") pod \"redhat-marketplace-hrdql\" (UID: \"01c5db12-a423-4844-8303-464b328772c2\") " pod="openshift-marketplace/redhat-marketplace-hrdql" Dec 05 12:36:30 crc kubenswrapper[4728]: I1205 12:36:30.372704 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/01c5db12-a423-4844-8303-464b328772c2-catalog-content\") pod \"redhat-marketplace-hrdql\" (UID: \"01c5db12-a423-4844-8303-464b328772c2\") " pod="openshift-marketplace/redhat-marketplace-hrdql" Dec 05 12:36:30 crc kubenswrapper[4728]: I1205 12:36:30.372773 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/01c5db12-a423-4844-8303-464b328772c2-utilities\") pod \"redhat-marketplace-hrdql\" (UID: \"01c5db12-a423-4844-8303-464b328772c2\") " pod="openshift-marketplace/redhat-marketplace-hrdql" Dec 05 12:36:30 crc kubenswrapper[4728]: I1205 12:36:30.404593 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bz969\" (UniqueName: \"kubernetes.io/projected/01c5db12-a423-4844-8303-464b328772c2-kube-api-access-bz969\") pod \"redhat-marketplace-hrdql\" (UID: \"01c5db12-a423-4844-8303-464b328772c2\") " pod="openshift-marketplace/redhat-marketplace-hrdql" Dec 05 12:36:30 crc kubenswrapper[4728]: I1205 12:36:30.492629 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hrdql" Dec 05 12:36:30 crc kubenswrapper[4728]: I1205 12:36:30.980510 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-hrdql"] Dec 05 12:36:31 crc kubenswrapper[4728]: I1205 12:36:31.869388 4728 generic.go:334] "Generic (PLEG): container finished" podID="01c5db12-a423-4844-8303-464b328772c2" containerID="774ca55f2349f587ecd6cbdbdd90969f682e0b89308d9d859117d5a83f78e540" exitCode=0 Dec 05 12:36:31 crc kubenswrapper[4728]: I1205 12:36:31.869462 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hrdql" event={"ID":"01c5db12-a423-4844-8303-464b328772c2","Type":"ContainerDied","Data":"774ca55f2349f587ecd6cbdbdd90969f682e0b89308d9d859117d5a83f78e540"} Dec 05 12:36:31 crc kubenswrapper[4728]: I1205 12:36:31.869730 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hrdql" event={"ID":"01c5db12-a423-4844-8303-464b328772c2","Type":"ContainerStarted","Data":"586b16c360e5771a15a45e610c779e7ab77e773347189b43c2f6091c0f741e49"} Dec 05 12:36:32 crc kubenswrapper[4728]: I1205 12:36:32.879408 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hrdql" event={"ID":"01c5db12-a423-4844-8303-464b328772c2","Type":"ContainerStarted","Data":"8ef01c1243812ca6d50530ea79c82e3a135aadf1692266fc16101d06d4e494fe"} Dec 05 12:36:33 crc kubenswrapper[4728]: I1205 12:36:33.889723 4728 generic.go:334] "Generic (PLEG): container finished" podID="01c5db12-a423-4844-8303-464b328772c2" containerID="8ef01c1243812ca6d50530ea79c82e3a135aadf1692266fc16101d06d4e494fe" exitCode=0 Dec 05 12:36:33 crc kubenswrapper[4728]: I1205 12:36:33.889780 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hrdql" event={"ID":"01c5db12-a423-4844-8303-464b328772c2","Type":"ContainerDied","Data":"8ef01c1243812ca6d50530ea79c82e3a135aadf1692266fc16101d06d4e494fe"} Dec 05 12:36:34 crc kubenswrapper[4728]: I1205 12:36:34.901866 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hrdql" event={"ID":"01c5db12-a423-4844-8303-464b328772c2","Type":"ContainerStarted","Data":"0224910bc5e08e89bf30bfb0b58fe1632fd49fa91aa4c4d659778325c1f141ff"} Dec 05 12:36:34 crc kubenswrapper[4728]: I1205 12:36:34.929682 4728 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-hrdql" podStartSLOduration=2.482074932 podStartE2EDuration="4.929663641s" podCreationTimestamp="2025-12-05 12:36:30 +0000 UTC" firstStartedPulling="2025-12-05 12:36:31.871826061 +0000 UTC m=+5326.013948764" lastFinishedPulling="2025-12-05 12:36:34.31941478 +0000 UTC m=+5328.461537473" observedRunningTime="2025-12-05 12:36:34.919632573 +0000 UTC m=+5329.061755276" watchObservedRunningTime="2025-12-05 12:36:34.929663641 +0000 UTC m=+5329.071786334" Dec 05 12:36:40 crc kubenswrapper[4728]: I1205 12:36:40.493040 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-hrdql" Dec 05 12:36:40 crc kubenswrapper[4728]: I1205 12:36:40.493466 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-hrdql" Dec 05 12:36:40 crc kubenswrapper[4728]: I1205 12:36:40.547867 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-hrdql" Dec 05 12:36:41 crc kubenswrapper[4728]: I1205 12:36:41.040518 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-hrdql" Dec 05 12:36:41 crc kubenswrapper[4728]: I1205 12:36:41.352604 4728 scope.go:117] "RemoveContainer" containerID="ae8200e36b953d8a7b1f348f59b89f13d9c5c73ea3665ac570f46d3131918f69" Dec 05 12:36:41 crc kubenswrapper[4728]: E1205 12:36:41.353170 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:36:41 crc kubenswrapper[4728]: I1205 12:36:41.757474 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-hrdql"] Dec 05 12:36:42 crc kubenswrapper[4728]: I1205 12:36:42.991850 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-hrdql" podUID="01c5db12-a423-4844-8303-464b328772c2" containerName="registry-server" containerID="cri-o://0224910bc5e08e89bf30bfb0b58fe1632fd49fa91aa4c4d659778325c1f141ff" gracePeriod=2 Dec 05 12:36:43 crc kubenswrapper[4728]: I1205 12:36:43.470617 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hrdql" Dec 05 12:36:43 crc kubenswrapper[4728]: I1205 12:36:43.584915 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/01c5db12-a423-4844-8303-464b328772c2-utilities\") pod \"01c5db12-a423-4844-8303-464b328772c2\" (UID: \"01c5db12-a423-4844-8303-464b328772c2\") " Dec 05 12:36:43 crc kubenswrapper[4728]: I1205 12:36:43.585210 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/01c5db12-a423-4844-8303-464b328772c2-catalog-content\") pod \"01c5db12-a423-4844-8303-464b328772c2\" (UID: \"01c5db12-a423-4844-8303-464b328772c2\") " Dec 05 12:36:43 crc kubenswrapper[4728]: I1205 12:36:43.585238 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bz969\" (UniqueName: \"kubernetes.io/projected/01c5db12-a423-4844-8303-464b328772c2-kube-api-access-bz969\") pod \"01c5db12-a423-4844-8303-464b328772c2\" (UID: \"01c5db12-a423-4844-8303-464b328772c2\") " Dec 05 12:36:43 crc kubenswrapper[4728]: I1205 12:36:43.585942 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/01c5db12-a423-4844-8303-464b328772c2-utilities" (OuterVolumeSpecName: "utilities") pod "01c5db12-a423-4844-8303-464b328772c2" (UID: "01c5db12-a423-4844-8303-464b328772c2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:36:43 crc kubenswrapper[4728]: I1205 12:36:43.595222 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01c5db12-a423-4844-8303-464b328772c2-kube-api-access-bz969" (OuterVolumeSpecName: "kube-api-access-bz969") pod "01c5db12-a423-4844-8303-464b328772c2" (UID: "01c5db12-a423-4844-8303-464b328772c2"). InnerVolumeSpecName "kube-api-access-bz969". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:36:43 crc kubenswrapper[4728]: I1205 12:36:43.608069 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/01c5db12-a423-4844-8303-464b328772c2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "01c5db12-a423-4844-8303-464b328772c2" (UID: "01c5db12-a423-4844-8303-464b328772c2"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:36:43 crc kubenswrapper[4728]: I1205 12:36:43.687444 4728 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/01c5db12-a423-4844-8303-464b328772c2-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 12:36:43 crc kubenswrapper[4728]: I1205 12:36:43.687492 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bz969\" (UniqueName: \"kubernetes.io/projected/01c5db12-a423-4844-8303-464b328772c2-kube-api-access-bz969\") on node \"crc\" DevicePath \"\"" Dec 05 12:36:43 crc kubenswrapper[4728]: I1205 12:36:43.687502 4728 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/01c5db12-a423-4844-8303-464b328772c2-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 12:36:44 crc kubenswrapper[4728]: I1205 12:36:44.008290 4728 generic.go:334] "Generic (PLEG): container finished" podID="01c5db12-a423-4844-8303-464b328772c2" containerID="0224910bc5e08e89bf30bfb0b58fe1632fd49fa91aa4c4d659778325c1f141ff" exitCode=0 Dec 05 12:36:44 crc kubenswrapper[4728]: I1205 12:36:44.008385 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hrdql" Dec 05 12:36:44 crc kubenswrapper[4728]: I1205 12:36:44.008378 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hrdql" event={"ID":"01c5db12-a423-4844-8303-464b328772c2","Type":"ContainerDied","Data":"0224910bc5e08e89bf30bfb0b58fe1632fd49fa91aa4c4d659778325c1f141ff"} Dec 05 12:36:44 crc kubenswrapper[4728]: I1205 12:36:44.008456 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hrdql" event={"ID":"01c5db12-a423-4844-8303-464b328772c2","Type":"ContainerDied","Data":"586b16c360e5771a15a45e610c779e7ab77e773347189b43c2f6091c0f741e49"} Dec 05 12:36:44 crc kubenswrapper[4728]: I1205 12:36:44.008513 4728 scope.go:117] "RemoveContainer" containerID="0224910bc5e08e89bf30bfb0b58fe1632fd49fa91aa4c4d659778325c1f141ff" Dec 05 12:36:44 crc kubenswrapper[4728]: I1205 12:36:44.026012 4728 scope.go:117] "RemoveContainer" containerID="8ef01c1243812ca6d50530ea79c82e3a135aadf1692266fc16101d06d4e494fe" Dec 05 12:36:44 crc kubenswrapper[4728]: I1205 12:36:44.057026 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-hrdql"] Dec 05 12:36:44 crc kubenswrapper[4728]: I1205 12:36:44.067878 4728 scope.go:117] "RemoveContainer" containerID="774ca55f2349f587ecd6cbdbdd90969f682e0b89308d9d859117d5a83f78e540" Dec 05 12:36:44 crc kubenswrapper[4728]: I1205 12:36:44.069214 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-hrdql"] Dec 05 12:36:44 crc kubenswrapper[4728]: I1205 12:36:44.098426 4728 scope.go:117] "RemoveContainer" containerID="0224910bc5e08e89bf30bfb0b58fe1632fd49fa91aa4c4d659778325c1f141ff" Dec 05 12:36:44 crc kubenswrapper[4728]: E1205 12:36:44.098872 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0224910bc5e08e89bf30bfb0b58fe1632fd49fa91aa4c4d659778325c1f141ff\": container with ID starting with 0224910bc5e08e89bf30bfb0b58fe1632fd49fa91aa4c4d659778325c1f141ff not found: ID does not exist" containerID="0224910bc5e08e89bf30bfb0b58fe1632fd49fa91aa4c4d659778325c1f141ff" Dec 05 12:36:44 crc kubenswrapper[4728]: I1205 12:36:44.098903 4728 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0224910bc5e08e89bf30bfb0b58fe1632fd49fa91aa4c4d659778325c1f141ff"} err="failed to get container status \"0224910bc5e08e89bf30bfb0b58fe1632fd49fa91aa4c4d659778325c1f141ff\": rpc error: code = NotFound desc = could not find container \"0224910bc5e08e89bf30bfb0b58fe1632fd49fa91aa4c4d659778325c1f141ff\": container with ID starting with 0224910bc5e08e89bf30bfb0b58fe1632fd49fa91aa4c4d659778325c1f141ff not found: ID does not exist" Dec 05 12:36:44 crc kubenswrapper[4728]: I1205 12:36:44.098926 4728 scope.go:117] "RemoveContainer" containerID="8ef01c1243812ca6d50530ea79c82e3a135aadf1692266fc16101d06d4e494fe" Dec 05 12:36:44 crc kubenswrapper[4728]: E1205 12:36:44.099173 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8ef01c1243812ca6d50530ea79c82e3a135aadf1692266fc16101d06d4e494fe\": container with ID starting with 8ef01c1243812ca6d50530ea79c82e3a135aadf1692266fc16101d06d4e494fe not found: ID does not exist" containerID="8ef01c1243812ca6d50530ea79c82e3a135aadf1692266fc16101d06d4e494fe" Dec 05 12:36:44 crc kubenswrapper[4728]: I1205 12:36:44.099230 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8ef01c1243812ca6d50530ea79c82e3a135aadf1692266fc16101d06d4e494fe"} err="failed to get container status \"8ef01c1243812ca6d50530ea79c82e3a135aadf1692266fc16101d06d4e494fe\": rpc error: code = NotFound desc = could not find container \"8ef01c1243812ca6d50530ea79c82e3a135aadf1692266fc16101d06d4e494fe\": container with ID starting with 8ef01c1243812ca6d50530ea79c82e3a135aadf1692266fc16101d06d4e494fe not found: ID does not exist" Dec 05 12:36:44 crc kubenswrapper[4728]: I1205 12:36:44.099277 4728 scope.go:117] "RemoveContainer" containerID="774ca55f2349f587ecd6cbdbdd90969f682e0b89308d9d859117d5a83f78e540" Dec 05 12:36:44 crc kubenswrapper[4728]: E1205 12:36:44.099572 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"774ca55f2349f587ecd6cbdbdd90969f682e0b89308d9d859117d5a83f78e540\": container with ID starting with 774ca55f2349f587ecd6cbdbdd90969f682e0b89308d9d859117d5a83f78e540 not found: ID does not exist" containerID="774ca55f2349f587ecd6cbdbdd90969f682e0b89308d9d859117d5a83f78e540" Dec 05 12:36:44 crc kubenswrapper[4728]: I1205 12:36:44.099599 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"774ca55f2349f587ecd6cbdbdd90969f682e0b89308d9d859117d5a83f78e540"} err="failed to get container status \"774ca55f2349f587ecd6cbdbdd90969f682e0b89308d9d859117d5a83f78e540\": rpc error: code = NotFound desc = could not find container \"774ca55f2349f587ecd6cbdbdd90969f682e0b89308d9d859117d5a83f78e540\": container with ID starting with 774ca55f2349f587ecd6cbdbdd90969f682e0b89308d9d859117d5a83f78e540 not found: ID does not exist" Dec 05 12:36:44 crc kubenswrapper[4728]: I1205 12:36:44.367930 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01c5db12-a423-4844-8303-464b328772c2" path="/var/lib/kubelet/pods/01c5db12-a423-4844-8303-464b328772c2/volumes" Dec 05 12:36:55 crc kubenswrapper[4728]: I1205 12:36:55.353326 4728 scope.go:117] "RemoveContainer" containerID="ae8200e36b953d8a7b1f348f59b89f13d9c5c73ea3665ac570f46d3131918f69" Dec 05 12:36:55 crc kubenswrapper[4728]: E1205 12:36:55.354354 4728 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:37:08 crc kubenswrapper[4728]: I1205 12:37:08.352691 4728 scope.go:117] "RemoveContainer" containerID="ae8200e36b953d8a7b1f348f59b89f13d9c5c73ea3665ac570f46d3131918f69" Dec 05 12:37:08 crc kubenswrapper[4728]: E1205 12:37:08.354747 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:37:19 crc kubenswrapper[4728]: I1205 12:37:19.352496 4728 scope.go:117] "RemoveContainer" containerID="ae8200e36b953d8a7b1f348f59b89f13d9c5c73ea3665ac570f46d3131918f69" Dec 05 12:37:19 crc kubenswrapper[4728]: E1205 12:37:19.353494 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:37:34 crc kubenswrapper[4728]: I1205 12:37:34.351801 4728 scope.go:117] "RemoveContainer" containerID="ae8200e36b953d8a7b1f348f59b89f13d9c5c73ea3665ac570f46d3131918f69" Dec 05 12:37:34 crc kubenswrapper[4728]: E1205 12:37:34.352546 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:37:48 crc kubenswrapper[4728]: I1205 12:37:48.352122 4728 scope.go:117] "RemoveContainer" containerID="ae8200e36b953d8a7b1f348f59b89f13d9c5c73ea3665ac570f46d3131918f69" Dec 05 12:37:48 crc kubenswrapper[4728]: E1205 12:37:48.352989 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:38:00 crc kubenswrapper[4728]: I1205 12:38:00.353922 4728 scope.go:117] "RemoveContainer" containerID="ae8200e36b953d8a7b1f348f59b89f13d9c5c73ea3665ac570f46d3131918f69" Dec 05 12:38:00 crc kubenswrapper[4728]: E1205 12:38:00.355107 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:38:07 crc kubenswrapper[4728]: I1205 12:38:07.285236 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-58b87/must-gather-2nh47"] Dec 05 12:38:07 crc kubenswrapper[4728]: E1205 12:38:07.286298 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01c5db12-a423-4844-8303-464b328772c2" containerName="extract-utilities" Dec 05 12:38:07 crc kubenswrapper[4728]: I1205 12:38:07.286317 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="01c5db12-a423-4844-8303-464b328772c2" containerName="extract-utilities" Dec 05 12:38:07 crc kubenswrapper[4728]: E1205 12:38:07.286336 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01c5db12-a423-4844-8303-464b328772c2" containerName="extract-content" Dec 05 12:38:07 crc kubenswrapper[4728]: I1205 12:38:07.286345 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="01c5db12-a423-4844-8303-464b328772c2" containerName="extract-content" Dec 05 12:38:07 crc kubenswrapper[4728]: E1205 12:38:07.286382 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01c5db12-a423-4844-8303-464b328772c2" containerName="registry-server" Dec 05 12:38:07 crc kubenswrapper[4728]: I1205 12:38:07.286390 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="01c5db12-a423-4844-8303-464b328772c2" containerName="registry-server" Dec 05 12:38:07 crc kubenswrapper[4728]: I1205 12:38:07.286656 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="01c5db12-a423-4844-8303-464b328772c2" containerName="registry-server" Dec 05 12:38:07 crc kubenswrapper[4728]: I1205 12:38:07.288010 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-58b87/must-gather-2nh47" Dec 05 12:38:07 crc kubenswrapper[4728]: I1205 12:38:07.289934 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-58b87"/"kube-root-ca.crt" Dec 05 12:38:07 crc kubenswrapper[4728]: I1205 12:38:07.289944 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-58b87"/"openshift-service-ca.crt" Dec 05 12:38:07 crc kubenswrapper[4728]: I1205 12:38:07.305532 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-58b87/must-gather-2nh47"] Dec 05 12:38:07 crc kubenswrapper[4728]: I1205 12:38:07.461344 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/32666438-f1e1-4418-bcf4-53164c4fe660-must-gather-output\") pod \"must-gather-2nh47\" (UID: \"32666438-f1e1-4418-bcf4-53164c4fe660\") " pod="openshift-must-gather-58b87/must-gather-2nh47" Dec 05 12:38:07 crc kubenswrapper[4728]: I1205 12:38:07.461943 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2crpg\" (UniqueName: \"kubernetes.io/projected/32666438-f1e1-4418-bcf4-53164c4fe660-kube-api-access-2crpg\") pod \"must-gather-2nh47\" (UID: \"32666438-f1e1-4418-bcf4-53164c4fe660\") " pod="openshift-must-gather-58b87/must-gather-2nh47" Dec 05 12:38:07 crc kubenswrapper[4728]: I1205 12:38:07.563380 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2crpg\" (UniqueName: \"kubernetes.io/projected/32666438-f1e1-4418-bcf4-53164c4fe660-kube-api-access-2crpg\") pod \"must-gather-2nh47\" (UID: \"32666438-f1e1-4418-bcf4-53164c4fe660\") " pod="openshift-must-gather-58b87/must-gather-2nh47" Dec 05 12:38:07 crc kubenswrapper[4728]: I1205 12:38:07.563452 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/32666438-f1e1-4418-bcf4-53164c4fe660-must-gather-output\") pod \"must-gather-2nh47\" (UID: \"32666438-f1e1-4418-bcf4-53164c4fe660\") " pod="openshift-must-gather-58b87/must-gather-2nh47" Dec 05 12:38:07 crc kubenswrapper[4728]: I1205 12:38:07.564010 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/32666438-f1e1-4418-bcf4-53164c4fe660-must-gather-output\") pod \"must-gather-2nh47\" (UID: \"32666438-f1e1-4418-bcf4-53164c4fe660\") " pod="openshift-must-gather-58b87/must-gather-2nh47" Dec 05 12:38:07 crc kubenswrapper[4728]: I1205 12:38:07.586331 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2crpg\" (UniqueName: \"kubernetes.io/projected/32666438-f1e1-4418-bcf4-53164c4fe660-kube-api-access-2crpg\") pod \"must-gather-2nh47\" (UID: \"32666438-f1e1-4418-bcf4-53164c4fe660\") " pod="openshift-must-gather-58b87/must-gather-2nh47" Dec 05 12:38:07 crc kubenswrapper[4728]: I1205 12:38:07.610437 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-58b87/must-gather-2nh47" Dec 05 12:38:08 crc kubenswrapper[4728]: I1205 12:38:08.078653 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-58b87/must-gather-2nh47"] Dec 05 12:38:08 crc kubenswrapper[4728]: I1205 12:38:08.135468 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-58b87/must-gather-2nh47" event={"ID":"32666438-f1e1-4418-bcf4-53164c4fe660","Type":"ContainerStarted","Data":"fe109ae89e50896dd5e4495a1e80e37cdb6999adae031161feeb3ac8546d4655"} Dec 05 12:38:09 crc kubenswrapper[4728]: I1205 12:38:09.148517 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-58b87/must-gather-2nh47" event={"ID":"32666438-f1e1-4418-bcf4-53164c4fe660","Type":"ContainerStarted","Data":"7c67675ef4f7d4d83b18dbf3cff292e10bdc25a1fb8ce852dca1d06a88a604db"} Dec 05 12:38:09 crc kubenswrapper[4728]: I1205 12:38:09.148843 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-58b87/must-gather-2nh47" event={"ID":"32666438-f1e1-4418-bcf4-53164c4fe660","Type":"ContainerStarted","Data":"f709369c59864a6ce41544494e079f85d9cb4c41867550146b25753d3a4f93b3"} Dec 05 12:38:09 crc kubenswrapper[4728]: I1205 12:38:09.170327 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-58b87/must-gather-2nh47" podStartSLOduration=2.170302127 podStartE2EDuration="2.170302127s" podCreationTimestamp="2025-12-05 12:38:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:38:09.165521619 +0000 UTC m=+5423.307644312" watchObservedRunningTime="2025-12-05 12:38:09.170302127 +0000 UTC m=+5423.312424820" Dec 05 12:38:12 crc kubenswrapper[4728]: I1205 12:38:12.346685 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-58b87/crc-debug-bvkvr"] Dec 05 12:38:12 crc kubenswrapper[4728]: I1205 12:38:12.349590 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-58b87/crc-debug-bvkvr" Dec 05 12:38:12 crc kubenswrapper[4728]: I1205 12:38:12.352682 4728 scope.go:117] "RemoveContainer" containerID="ae8200e36b953d8a7b1f348f59b89f13d9c5c73ea3665ac570f46d3131918f69" Dec 05 12:38:12 crc kubenswrapper[4728]: E1205 12:38:12.353071 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:38:12 crc kubenswrapper[4728]: I1205 12:38:12.355527 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-58b87"/"default-dockercfg-vlns6" Dec 05 12:38:12 crc kubenswrapper[4728]: I1205 12:38:12.465610 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vpqml\" (UniqueName: \"kubernetes.io/projected/1c184dd0-befe-4743-b27b-07a19761c1b2-kube-api-access-vpqml\") pod \"crc-debug-bvkvr\" (UID: \"1c184dd0-befe-4743-b27b-07a19761c1b2\") " pod="openshift-must-gather-58b87/crc-debug-bvkvr" Dec 05 12:38:12 crc kubenswrapper[4728]: I1205 12:38:12.465714 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1c184dd0-befe-4743-b27b-07a19761c1b2-host\") pod \"crc-debug-bvkvr\" (UID: \"1c184dd0-befe-4743-b27b-07a19761c1b2\") " pod="openshift-must-gather-58b87/crc-debug-bvkvr" Dec 05 12:38:12 crc kubenswrapper[4728]: I1205 12:38:12.567240 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1c184dd0-befe-4743-b27b-07a19761c1b2-host\") pod \"crc-debug-bvkvr\" (UID: \"1c184dd0-befe-4743-b27b-07a19761c1b2\") " pod="openshift-must-gather-58b87/crc-debug-bvkvr" Dec 05 12:38:12 crc kubenswrapper[4728]: I1205 12:38:12.567705 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vpqml\" (UniqueName: \"kubernetes.io/projected/1c184dd0-befe-4743-b27b-07a19761c1b2-kube-api-access-vpqml\") pod \"crc-debug-bvkvr\" (UID: \"1c184dd0-befe-4743-b27b-07a19761c1b2\") " pod="openshift-must-gather-58b87/crc-debug-bvkvr" Dec 05 12:38:12 crc kubenswrapper[4728]: I1205 12:38:12.568154 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1c184dd0-befe-4743-b27b-07a19761c1b2-host\") pod \"crc-debug-bvkvr\" (UID: \"1c184dd0-befe-4743-b27b-07a19761c1b2\") " pod="openshift-must-gather-58b87/crc-debug-bvkvr" Dec 05 12:38:12 crc kubenswrapper[4728]: I1205 12:38:12.589067 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vpqml\" (UniqueName: \"kubernetes.io/projected/1c184dd0-befe-4743-b27b-07a19761c1b2-kube-api-access-vpqml\") pod \"crc-debug-bvkvr\" (UID: \"1c184dd0-befe-4743-b27b-07a19761c1b2\") " pod="openshift-must-gather-58b87/crc-debug-bvkvr" Dec 05 12:38:12 crc kubenswrapper[4728]: I1205 12:38:12.671146 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-58b87/crc-debug-bvkvr" Dec 05 12:38:12 crc kubenswrapper[4728]: W1205 12:38:12.698752 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1c184dd0_befe_4743_b27b_07a19761c1b2.slice/crio-52de12055d4c73533054cc33ff10ab179db5bdf3de687144d1d63417dc8951de WatchSource:0}: Error finding container 52de12055d4c73533054cc33ff10ab179db5bdf3de687144d1d63417dc8951de: Status 404 returned error can't find the container with id 52de12055d4c73533054cc33ff10ab179db5bdf3de687144d1d63417dc8951de Dec 05 12:38:13 crc kubenswrapper[4728]: I1205 12:38:13.188769 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-58b87/crc-debug-bvkvr" event={"ID":"1c184dd0-befe-4743-b27b-07a19761c1b2","Type":"ContainerStarted","Data":"52de12055d4c73533054cc33ff10ab179db5bdf3de687144d1d63417dc8951de"} Dec 05 12:38:14 crc kubenswrapper[4728]: I1205 12:38:14.207631 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-58b87/crc-debug-bvkvr" event={"ID":"1c184dd0-befe-4743-b27b-07a19761c1b2","Type":"ContainerStarted","Data":"fdef8a9211248c42e0cb9ebb0d823945b82ddfc9da15a0caabece724d549bbf0"} Dec 05 12:38:15 crc kubenswrapper[4728]: I1205 12:38:15.226975 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-58b87/crc-debug-bvkvr" podStartSLOduration=3.226950682 podStartE2EDuration="3.226950682s" podCreationTimestamp="2025-12-05 12:38:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:38:15.226016727 +0000 UTC m=+5429.368139420" watchObservedRunningTime="2025-12-05 12:38:15.226950682 +0000 UTC m=+5429.369073375" Dec 05 12:38:24 crc kubenswrapper[4728]: I1205 12:38:24.353500 4728 scope.go:117] "RemoveContainer" containerID="ae8200e36b953d8a7b1f348f59b89f13d9c5c73ea3665ac570f46d3131918f69" Dec 05 12:38:24 crc kubenswrapper[4728]: E1205 12:38:24.354471 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:38:38 crc kubenswrapper[4728]: I1205 12:38:38.352624 4728 scope.go:117] "RemoveContainer" containerID="ae8200e36b953d8a7b1f348f59b89f13d9c5c73ea3665ac570f46d3131918f69" Dec 05 12:38:38 crc kubenswrapper[4728]: E1205 12:38:38.354045 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:38:52 crc kubenswrapper[4728]: I1205 12:38:52.352840 4728 scope.go:117] "RemoveContainer" containerID="ae8200e36b953d8a7b1f348f59b89f13d9c5c73ea3665ac570f46d3131918f69" Dec 05 12:38:52 crc kubenswrapper[4728]: E1205 12:38:52.353683 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:38:58 crc kubenswrapper[4728]: I1205 12:38:58.617364 4728 generic.go:334] "Generic (PLEG): container finished" podID="1c184dd0-befe-4743-b27b-07a19761c1b2" containerID="fdef8a9211248c42e0cb9ebb0d823945b82ddfc9da15a0caabece724d549bbf0" exitCode=0 Dec 05 12:38:58 crc kubenswrapper[4728]: I1205 12:38:58.617475 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-58b87/crc-debug-bvkvr" event={"ID":"1c184dd0-befe-4743-b27b-07a19761c1b2","Type":"ContainerDied","Data":"fdef8a9211248c42e0cb9ebb0d823945b82ddfc9da15a0caabece724d549bbf0"} Dec 05 12:38:59 crc kubenswrapper[4728]: I1205 12:38:59.735605 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-58b87/crc-debug-bvkvr" Dec 05 12:38:59 crc kubenswrapper[4728]: I1205 12:38:59.768882 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-58b87/crc-debug-bvkvr"] Dec 05 12:38:59 crc kubenswrapper[4728]: I1205 12:38:59.776343 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-58b87/crc-debug-bvkvr"] Dec 05 12:38:59 crc kubenswrapper[4728]: I1205 12:38:59.813439 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1c184dd0-befe-4743-b27b-07a19761c1b2-host\") pod \"1c184dd0-befe-4743-b27b-07a19761c1b2\" (UID: \"1c184dd0-befe-4743-b27b-07a19761c1b2\") " Dec 05 12:38:59 crc kubenswrapper[4728]: I1205 12:38:59.813509 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vpqml\" (UniqueName: \"kubernetes.io/projected/1c184dd0-befe-4743-b27b-07a19761c1b2-kube-api-access-vpqml\") pod \"1c184dd0-befe-4743-b27b-07a19761c1b2\" (UID: \"1c184dd0-befe-4743-b27b-07a19761c1b2\") " Dec 05 12:38:59 crc kubenswrapper[4728]: I1205 12:38:59.813646 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1c184dd0-befe-4743-b27b-07a19761c1b2-host" (OuterVolumeSpecName: "host") pod "1c184dd0-befe-4743-b27b-07a19761c1b2" (UID: "1c184dd0-befe-4743-b27b-07a19761c1b2"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:38:59 crc kubenswrapper[4728]: I1205 12:38:59.814278 4728 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1c184dd0-befe-4743-b27b-07a19761c1b2-host\") on node \"crc\" DevicePath \"\"" Dec 05 12:38:59 crc kubenswrapper[4728]: I1205 12:38:59.823157 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c184dd0-befe-4743-b27b-07a19761c1b2-kube-api-access-vpqml" (OuterVolumeSpecName: "kube-api-access-vpqml") pod "1c184dd0-befe-4743-b27b-07a19761c1b2" (UID: "1c184dd0-befe-4743-b27b-07a19761c1b2"). InnerVolumeSpecName "kube-api-access-vpqml". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:38:59 crc kubenswrapper[4728]: I1205 12:38:59.916051 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vpqml\" (UniqueName: \"kubernetes.io/projected/1c184dd0-befe-4743-b27b-07a19761c1b2-kube-api-access-vpqml\") on node \"crc\" DevicePath \"\"" Dec 05 12:39:00 crc kubenswrapper[4728]: I1205 12:39:00.363244 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1c184dd0-befe-4743-b27b-07a19761c1b2" path="/var/lib/kubelet/pods/1c184dd0-befe-4743-b27b-07a19761c1b2/volumes" Dec 05 12:39:00 crc kubenswrapper[4728]: I1205 12:39:00.639218 4728 scope.go:117] "RemoveContainer" containerID="fdef8a9211248c42e0cb9ebb0d823945b82ddfc9da15a0caabece724d549bbf0" Dec 05 12:39:00 crc kubenswrapper[4728]: I1205 12:39:00.639387 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-58b87/crc-debug-bvkvr" Dec 05 12:39:00 crc kubenswrapper[4728]: I1205 12:39:00.947227 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-58b87/crc-debug-kfrgk"] Dec 05 12:39:00 crc kubenswrapper[4728]: E1205 12:39:00.947629 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c184dd0-befe-4743-b27b-07a19761c1b2" containerName="container-00" Dec 05 12:39:00 crc kubenswrapper[4728]: I1205 12:39:00.947641 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c184dd0-befe-4743-b27b-07a19761c1b2" containerName="container-00" Dec 05 12:39:00 crc kubenswrapper[4728]: I1205 12:39:00.947833 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c184dd0-befe-4743-b27b-07a19761c1b2" containerName="container-00" Dec 05 12:39:00 crc kubenswrapper[4728]: I1205 12:39:00.948434 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-58b87/crc-debug-kfrgk" Dec 05 12:39:00 crc kubenswrapper[4728]: I1205 12:39:00.950574 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-58b87"/"default-dockercfg-vlns6" Dec 05 12:39:01 crc kubenswrapper[4728]: I1205 12:39:01.039902 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6bde7cc0-8300-40c8-9b8f-a08e14c9a20c-host\") pod \"crc-debug-kfrgk\" (UID: \"6bde7cc0-8300-40c8-9b8f-a08e14c9a20c\") " pod="openshift-must-gather-58b87/crc-debug-kfrgk" Dec 05 12:39:01 crc kubenswrapper[4728]: I1205 12:39:01.040012 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pggbx\" (UniqueName: \"kubernetes.io/projected/6bde7cc0-8300-40c8-9b8f-a08e14c9a20c-kube-api-access-pggbx\") pod \"crc-debug-kfrgk\" (UID: \"6bde7cc0-8300-40c8-9b8f-a08e14c9a20c\") " pod="openshift-must-gather-58b87/crc-debug-kfrgk" Dec 05 12:39:01 crc kubenswrapper[4728]: I1205 12:39:01.141919 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6bde7cc0-8300-40c8-9b8f-a08e14c9a20c-host\") pod \"crc-debug-kfrgk\" (UID: \"6bde7cc0-8300-40c8-9b8f-a08e14c9a20c\") " pod="openshift-must-gather-58b87/crc-debug-kfrgk" Dec 05 12:39:01 crc kubenswrapper[4728]: I1205 12:39:01.142011 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pggbx\" (UniqueName: \"kubernetes.io/projected/6bde7cc0-8300-40c8-9b8f-a08e14c9a20c-kube-api-access-pggbx\") pod \"crc-debug-kfrgk\" (UID: \"6bde7cc0-8300-40c8-9b8f-a08e14c9a20c\") " pod="openshift-must-gather-58b87/crc-debug-kfrgk" Dec 05 12:39:01 crc kubenswrapper[4728]: I1205 12:39:01.142050 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6bde7cc0-8300-40c8-9b8f-a08e14c9a20c-host\") pod \"crc-debug-kfrgk\" (UID: \"6bde7cc0-8300-40c8-9b8f-a08e14c9a20c\") " pod="openshift-must-gather-58b87/crc-debug-kfrgk" Dec 05 12:39:01 crc kubenswrapper[4728]: I1205 12:39:01.171758 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pggbx\" (UniqueName: \"kubernetes.io/projected/6bde7cc0-8300-40c8-9b8f-a08e14c9a20c-kube-api-access-pggbx\") pod \"crc-debug-kfrgk\" (UID: \"6bde7cc0-8300-40c8-9b8f-a08e14c9a20c\") " pod="openshift-must-gather-58b87/crc-debug-kfrgk" Dec 05 12:39:01 crc kubenswrapper[4728]: I1205 12:39:01.269257 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-58b87/crc-debug-kfrgk" Dec 05 12:39:01 crc kubenswrapper[4728]: I1205 12:39:01.650125 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-58b87/crc-debug-kfrgk" event={"ID":"6bde7cc0-8300-40c8-9b8f-a08e14c9a20c","Type":"ContainerStarted","Data":"150f9b0aae85b31cbca680f39a5e5722a3039f3b7ce0d2ba542d92cab1bc0ba7"} Dec 05 12:39:01 crc kubenswrapper[4728]: I1205 12:39:01.650217 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-58b87/crc-debug-kfrgk" event={"ID":"6bde7cc0-8300-40c8-9b8f-a08e14c9a20c","Type":"ContainerStarted","Data":"47444f17919f65309d053af6e19132fb5f90aa0546318e0aaefe70f4e16b5a35"} Dec 05 12:39:02 crc kubenswrapper[4728]: I1205 12:39:02.668330 4728 generic.go:334] "Generic (PLEG): container finished" podID="6bde7cc0-8300-40c8-9b8f-a08e14c9a20c" containerID="150f9b0aae85b31cbca680f39a5e5722a3039f3b7ce0d2ba542d92cab1bc0ba7" exitCode=0 Dec 05 12:39:02 crc kubenswrapper[4728]: I1205 12:39:02.668381 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-58b87/crc-debug-kfrgk" event={"ID":"6bde7cc0-8300-40c8-9b8f-a08e14c9a20c","Type":"ContainerDied","Data":"150f9b0aae85b31cbca680f39a5e5722a3039f3b7ce0d2ba542d92cab1bc0ba7"} Dec 05 12:39:03 crc kubenswrapper[4728]: I1205 12:39:03.821108 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-58b87/crc-debug-kfrgk" Dec 05 12:39:03 crc kubenswrapper[4728]: I1205 12:39:03.890988 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pggbx\" (UniqueName: \"kubernetes.io/projected/6bde7cc0-8300-40c8-9b8f-a08e14c9a20c-kube-api-access-pggbx\") pod \"6bde7cc0-8300-40c8-9b8f-a08e14c9a20c\" (UID: \"6bde7cc0-8300-40c8-9b8f-a08e14c9a20c\") " Dec 05 12:39:03 crc kubenswrapper[4728]: I1205 12:39:03.891159 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6bde7cc0-8300-40c8-9b8f-a08e14c9a20c-host\") pod \"6bde7cc0-8300-40c8-9b8f-a08e14c9a20c\" (UID: \"6bde7cc0-8300-40c8-9b8f-a08e14c9a20c\") " Dec 05 12:39:03 crc kubenswrapper[4728]: I1205 12:39:03.891341 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6bde7cc0-8300-40c8-9b8f-a08e14c9a20c-host" (OuterVolumeSpecName: "host") pod "6bde7cc0-8300-40c8-9b8f-a08e14c9a20c" (UID: "6bde7cc0-8300-40c8-9b8f-a08e14c9a20c"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:39:03 crc kubenswrapper[4728]: I1205 12:39:03.891918 4728 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6bde7cc0-8300-40c8-9b8f-a08e14c9a20c-host\") on node \"crc\" DevicePath \"\"" Dec 05 12:39:03 crc kubenswrapper[4728]: I1205 12:39:03.896584 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6bde7cc0-8300-40c8-9b8f-a08e14c9a20c-kube-api-access-pggbx" (OuterVolumeSpecName: "kube-api-access-pggbx") pod "6bde7cc0-8300-40c8-9b8f-a08e14c9a20c" (UID: "6bde7cc0-8300-40c8-9b8f-a08e14c9a20c"). InnerVolumeSpecName "kube-api-access-pggbx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:39:03 crc kubenswrapper[4728]: I1205 12:39:03.993287 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pggbx\" (UniqueName: \"kubernetes.io/projected/6bde7cc0-8300-40c8-9b8f-a08e14c9a20c-kube-api-access-pggbx\") on node \"crc\" DevicePath \"\"" Dec 05 12:39:04 crc kubenswrapper[4728]: I1205 12:39:04.687167 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-58b87/crc-debug-kfrgk" event={"ID":"6bde7cc0-8300-40c8-9b8f-a08e14c9a20c","Type":"ContainerDied","Data":"47444f17919f65309d053af6e19132fb5f90aa0546318e0aaefe70f4e16b5a35"} Dec 05 12:39:04 crc kubenswrapper[4728]: I1205 12:39:04.687481 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="47444f17919f65309d053af6e19132fb5f90aa0546318e0aaefe70f4e16b5a35" Dec 05 12:39:04 crc kubenswrapper[4728]: I1205 12:39:04.687567 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-58b87/crc-debug-kfrgk" Dec 05 12:39:04 crc kubenswrapper[4728]: I1205 12:39:04.915225 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-58b87/crc-debug-kfrgk"] Dec 05 12:39:04 crc kubenswrapper[4728]: I1205 12:39:04.926018 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-58b87/crc-debug-kfrgk"] Dec 05 12:39:06 crc kubenswrapper[4728]: I1205 12:39:06.105130 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-58b87/crc-debug-66bdx"] Dec 05 12:39:06 crc kubenswrapper[4728]: E1205 12:39:06.105876 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6bde7cc0-8300-40c8-9b8f-a08e14c9a20c" containerName="container-00" Dec 05 12:39:06 crc kubenswrapper[4728]: I1205 12:39:06.105890 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="6bde7cc0-8300-40c8-9b8f-a08e14c9a20c" containerName="container-00" Dec 05 12:39:06 crc kubenswrapper[4728]: I1205 12:39:06.106093 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="6bde7cc0-8300-40c8-9b8f-a08e14c9a20c" containerName="container-00" Dec 05 12:39:06 crc kubenswrapper[4728]: I1205 12:39:06.106694 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-58b87/crc-debug-66bdx" Dec 05 12:39:06 crc kubenswrapper[4728]: I1205 12:39:06.109133 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-58b87"/"default-dockercfg-vlns6" Dec 05 12:39:06 crc kubenswrapper[4728]: I1205 12:39:06.236164 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c867d2f9-c165-454f-99db-3fdc5842a34a-host\") pod \"crc-debug-66bdx\" (UID: \"c867d2f9-c165-454f-99db-3fdc5842a34a\") " pod="openshift-must-gather-58b87/crc-debug-66bdx" Dec 05 12:39:06 crc kubenswrapper[4728]: I1205 12:39:06.236336 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9jncp\" (UniqueName: \"kubernetes.io/projected/c867d2f9-c165-454f-99db-3fdc5842a34a-kube-api-access-9jncp\") pod \"crc-debug-66bdx\" (UID: \"c867d2f9-c165-454f-99db-3fdc5842a34a\") " pod="openshift-must-gather-58b87/crc-debug-66bdx" Dec 05 12:39:06 crc kubenswrapper[4728]: I1205 12:39:06.337732 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c867d2f9-c165-454f-99db-3fdc5842a34a-host\") pod \"crc-debug-66bdx\" (UID: \"c867d2f9-c165-454f-99db-3fdc5842a34a\") " pod="openshift-must-gather-58b87/crc-debug-66bdx" Dec 05 12:39:06 crc kubenswrapper[4728]: I1205 12:39:06.337895 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9jncp\" (UniqueName: \"kubernetes.io/projected/c867d2f9-c165-454f-99db-3fdc5842a34a-kube-api-access-9jncp\") pod \"crc-debug-66bdx\" (UID: \"c867d2f9-c165-454f-99db-3fdc5842a34a\") " pod="openshift-must-gather-58b87/crc-debug-66bdx" Dec 05 12:39:06 crc kubenswrapper[4728]: I1205 12:39:06.338290 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c867d2f9-c165-454f-99db-3fdc5842a34a-host\") pod \"crc-debug-66bdx\" (UID: \"c867d2f9-c165-454f-99db-3fdc5842a34a\") " pod="openshift-must-gather-58b87/crc-debug-66bdx" Dec 05 12:39:06 crc kubenswrapper[4728]: I1205 12:39:06.357481 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9jncp\" (UniqueName: \"kubernetes.io/projected/c867d2f9-c165-454f-99db-3fdc5842a34a-kube-api-access-9jncp\") pod \"crc-debug-66bdx\" (UID: \"c867d2f9-c165-454f-99db-3fdc5842a34a\") " pod="openshift-must-gather-58b87/crc-debug-66bdx" Dec 05 12:39:06 crc kubenswrapper[4728]: I1205 12:39:06.359229 4728 scope.go:117] "RemoveContainer" containerID="ae8200e36b953d8a7b1f348f59b89f13d9c5c73ea3665ac570f46d3131918f69" Dec 05 12:39:06 crc kubenswrapper[4728]: E1205 12:39:06.359483 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:39:06 crc kubenswrapper[4728]: I1205 12:39:06.365509 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6bde7cc0-8300-40c8-9b8f-a08e14c9a20c" path="/var/lib/kubelet/pods/6bde7cc0-8300-40c8-9b8f-a08e14c9a20c/volumes" Dec 05 12:39:06 crc kubenswrapper[4728]: I1205 12:39:06.426073 4728 util.go:30] 
"No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-58b87/crc-debug-66bdx" Dec 05 12:39:06 crc kubenswrapper[4728]: I1205 12:39:06.711208 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-58b87/crc-debug-66bdx" event={"ID":"c867d2f9-c165-454f-99db-3fdc5842a34a","Type":"ContainerStarted","Data":"4204365a7ae5b8e1bd6fbf7faca9343db17f1f0fab48ad71f585eaa7a09a9986"} Dec 05 12:39:07 crc kubenswrapper[4728]: I1205 12:39:07.723197 4728 generic.go:334] "Generic (PLEG): container finished" podID="c867d2f9-c165-454f-99db-3fdc5842a34a" containerID="afd8d2924d42d7824aaec154fe58078486860356bce8f976e2f02166e43c5cd6" exitCode=0 Dec 05 12:39:07 crc kubenswrapper[4728]: I1205 12:39:07.723397 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-58b87/crc-debug-66bdx" event={"ID":"c867d2f9-c165-454f-99db-3fdc5842a34a","Type":"ContainerDied","Data":"afd8d2924d42d7824aaec154fe58078486860356bce8f976e2f02166e43c5cd6"} Dec 05 12:39:07 crc kubenswrapper[4728]: I1205 12:39:07.767906 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-58b87/crc-debug-66bdx"] Dec 05 12:39:07 crc kubenswrapper[4728]: I1205 12:39:07.777863 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-58b87/crc-debug-66bdx"] Dec 05 12:39:08 crc kubenswrapper[4728]: I1205 12:39:08.848572 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-58b87/crc-debug-66bdx" Dec 05 12:39:08 crc kubenswrapper[4728]: I1205 12:39:08.887049 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9jncp\" (UniqueName: \"kubernetes.io/projected/c867d2f9-c165-454f-99db-3fdc5842a34a-kube-api-access-9jncp\") pod \"c867d2f9-c165-454f-99db-3fdc5842a34a\" (UID: \"c867d2f9-c165-454f-99db-3fdc5842a34a\") " Dec 05 12:39:08 crc kubenswrapper[4728]: I1205 12:39:08.887456 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c867d2f9-c165-454f-99db-3fdc5842a34a-host\") pod \"c867d2f9-c165-454f-99db-3fdc5842a34a\" (UID: \"c867d2f9-c165-454f-99db-3fdc5842a34a\") " Dec 05 12:39:08 crc kubenswrapper[4728]: I1205 12:39:08.887595 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c867d2f9-c165-454f-99db-3fdc5842a34a-host" (OuterVolumeSpecName: "host") pod "c867d2f9-c165-454f-99db-3fdc5842a34a" (UID: "c867d2f9-c165-454f-99db-3fdc5842a34a"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:39:08 crc kubenswrapper[4728]: I1205 12:39:08.887961 4728 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c867d2f9-c165-454f-99db-3fdc5842a34a-host\") on node \"crc\" DevicePath \"\"" Dec 05 12:39:08 crc kubenswrapper[4728]: I1205 12:39:08.894196 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c867d2f9-c165-454f-99db-3fdc5842a34a-kube-api-access-9jncp" (OuterVolumeSpecName: "kube-api-access-9jncp") pod "c867d2f9-c165-454f-99db-3fdc5842a34a" (UID: "c867d2f9-c165-454f-99db-3fdc5842a34a"). InnerVolumeSpecName "kube-api-access-9jncp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:39:08 crc kubenswrapper[4728]: I1205 12:39:08.989989 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9jncp\" (UniqueName: \"kubernetes.io/projected/c867d2f9-c165-454f-99db-3fdc5842a34a-kube-api-access-9jncp\") on node \"crc\" DevicePath \"\"" Dec 05 12:39:09 crc kubenswrapper[4728]: I1205 12:39:09.710838 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-wjdd5"] Dec 05 12:39:09 crc kubenswrapper[4728]: E1205 12:39:09.711406 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c867d2f9-c165-454f-99db-3fdc5842a34a" containerName="container-00" Dec 05 12:39:09 crc kubenswrapper[4728]: I1205 12:39:09.711427 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="c867d2f9-c165-454f-99db-3fdc5842a34a" containerName="container-00" Dec 05 12:39:09 crc kubenswrapper[4728]: I1205 12:39:09.711670 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="c867d2f9-c165-454f-99db-3fdc5842a34a" containerName="container-00" Dec 05 12:39:09 crc kubenswrapper[4728]: I1205 12:39:09.713348 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-wjdd5" Dec 05 12:39:09 crc kubenswrapper[4728]: I1205 12:39:09.727987 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-wjdd5"] Dec 05 12:39:09 crc kubenswrapper[4728]: I1205 12:39:09.771480 4728 scope.go:117] "RemoveContainer" containerID="afd8d2924d42d7824aaec154fe58078486860356bce8f976e2f02166e43c5cd6" Dec 05 12:39:09 crc kubenswrapper[4728]: I1205 12:39:09.771547 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-58b87/crc-debug-66bdx" Dec 05 12:39:09 crc kubenswrapper[4728]: I1205 12:39:09.826495 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7381bd29-ef43-445c-8a2c-ac0306ffbc67-utilities\") pod \"redhat-operators-wjdd5\" (UID: \"7381bd29-ef43-445c-8a2c-ac0306ffbc67\") " pod="openshift-marketplace/redhat-operators-wjdd5" Dec 05 12:39:09 crc kubenswrapper[4728]: I1205 12:39:09.826687 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nxj5v\" (UniqueName: \"kubernetes.io/projected/7381bd29-ef43-445c-8a2c-ac0306ffbc67-kube-api-access-nxj5v\") pod \"redhat-operators-wjdd5\" (UID: \"7381bd29-ef43-445c-8a2c-ac0306ffbc67\") " pod="openshift-marketplace/redhat-operators-wjdd5" Dec 05 12:39:09 crc kubenswrapper[4728]: I1205 12:39:09.826875 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7381bd29-ef43-445c-8a2c-ac0306ffbc67-catalog-content\") pod \"redhat-operators-wjdd5\" (UID: \"7381bd29-ef43-445c-8a2c-ac0306ffbc67\") " pod="openshift-marketplace/redhat-operators-wjdd5" Dec 05 12:39:09 crc kubenswrapper[4728]: I1205 12:39:09.929191 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7381bd29-ef43-445c-8a2c-ac0306ffbc67-catalog-content\") pod \"redhat-operators-wjdd5\" (UID: \"7381bd29-ef43-445c-8a2c-ac0306ffbc67\") " pod="openshift-marketplace/redhat-operators-wjdd5" Dec 05 12:39:09 crc kubenswrapper[4728]: I1205 12:39:09.929282 4728 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7381bd29-ef43-445c-8a2c-ac0306ffbc67-utilities\") pod \"redhat-operators-wjdd5\" (UID: \"7381bd29-ef43-445c-8a2c-ac0306ffbc67\") " pod="openshift-marketplace/redhat-operators-wjdd5" Dec 05 12:39:09 crc kubenswrapper[4728]: I1205 12:39:09.929360 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nxj5v\" (UniqueName: \"kubernetes.io/projected/7381bd29-ef43-445c-8a2c-ac0306ffbc67-kube-api-access-nxj5v\") pod \"redhat-operators-wjdd5\" (UID: \"7381bd29-ef43-445c-8a2c-ac0306ffbc67\") " pod="openshift-marketplace/redhat-operators-wjdd5" Dec 05 12:39:09 crc kubenswrapper[4728]: I1205 12:39:09.930190 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7381bd29-ef43-445c-8a2c-ac0306ffbc67-catalog-content\") pod \"redhat-operators-wjdd5\" (UID: \"7381bd29-ef43-445c-8a2c-ac0306ffbc67\") " pod="openshift-marketplace/redhat-operators-wjdd5" Dec 05 12:39:09 crc kubenswrapper[4728]: I1205 12:39:09.930452 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7381bd29-ef43-445c-8a2c-ac0306ffbc67-utilities\") pod \"redhat-operators-wjdd5\" (UID: \"7381bd29-ef43-445c-8a2c-ac0306ffbc67\") " pod="openshift-marketplace/redhat-operators-wjdd5" Dec 05 12:39:09 crc kubenswrapper[4728]: I1205 12:39:09.952409 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nxj5v\" (UniqueName: \"kubernetes.io/projected/7381bd29-ef43-445c-8a2c-ac0306ffbc67-kube-api-access-nxj5v\") pod \"redhat-operators-wjdd5\" (UID: \"7381bd29-ef43-445c-8a2c-ac0306ffbc67\") " pod="openshift-marketplace/redhat-operators-wjdd5" Dec 05 12:39:10 crc kubenswrapper[4728]: I1205 12:39:10.054406 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-wjdd5" Dec 05 12:39:10 crc kubenswrapper[4728]: I1205 12:39:10.367684 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c867d2f9-c165-454f-99db-3fdc5842a34a" path="/var/lib/kubelet/pods/c867d2f9-c165-454f-99db-3fdc5842a34a/volumes" Dec 05 12:39:10 crc kubenswrapper[4728]: I1205 12:39:10.589407 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-wjdd5"] Dec 05 12:39:10 crc kubenswrapper[4728]: W1205 12:39:10.606679 4728 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7381bd29_ef43_445c_8a2c_ac0306ffbc67.slice/crio-33778accd3967e1f6ec835675545cc6abad656691589aed4ce467a2316585b0b WatchSource:0}: Error finding container 33778accd3967e1f6ec835675545cc6abad656691589aed4ce467a2316585b0b: Status 404 returned error can't find the container with id 33778accd3967e1f6ec835675545cc6abad656691589aed4ce467a2316585b0b Dec 05 12:39:10 crc kubenswrapper[4728]: I1205 12:39:10.784108 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wjdd5" event={"ID":"7381bd29-ef43-445c-8a2c-ac0306ffbc67","Type":"ContainerStarted","Data":"33778accd3967e1f6ec835675545cc6abad656691589aed4ce467a2316585b0b"} Dec 05 12:39:11 crc kubenswrapper[4728]: I1205 12:39:11.795225 4728 generic.go:334] "Generic (PLEG): container finished" podID="7381bd29-ef43-445c-8a2c-ac0306ffbc67" containerID="b9e6903a9c588266581fd49c80e0b8d6c5caaf577199d079daa1c5c891901a82" exitCode=0 Dec 05 12:39:11 crc kubenswrapper[4728]: I1205 12:39:11.795282 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wjdd5" event={"ID":"7381bd29-ef43-445c-8a2c-ac0306ffbc67","Type":"ContainerDied","Data":"b9e6903a9c588266581fd49c80e0b8d6c5caaf577199d079daa1c5c891901a82"} Dec 05 12:39:11 crc kubenswrapper[4728]: I1205 12:39:11.797750 4728 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 12:39:12 crc kubenswrapper[4728]: I1205 12:39:12.806841 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wjdd5" event={"ID":"7381bd29-ef43-445c-8a2c-ac0306ffbc67","Type":"ContainerStarted","Data":"032bd16c6894e355e526dbd6e145f07fb0618b7f5587778d6dd06093c09ff01c"} Dec 05 12:39:13 crc kubenswrapper[4728]: I1205 12:39:13.817472 4728 generic.go:334] "Generic (PLEG): container finished" podID="7381bd29-ef43-445c-8a2c-ac0306ffbc67" containerID="032bd16c6894e355e526dbd6e145f07fb0618b7f5587778d6dd06093c09ff01c" exitCode=0 Dec 05 12:39:13 crc kubenswrapper[4728]: I1205 12:39:13.817681 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wjdd5" event={"ID":"7381bd29-ef43-445c-8a2c-ac0306ffbc67","Type":"ContainerDied","Data":"032bd16c6894e355e526dbd6e145f07fb0618b7f5587778d6dd06093c09ff01c"} Dec 05 12:39:15 crc kubenswrapper[4728]: I1205 12:39:15.844331 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wjdd5" event={"ID":"7381bd29-ef43-445c-8a2c-ac0306ffbc67","Type":"ContainerStarted","Data":"a829d32a906ec01b8a96d7a678dc8daaca3608cce74e06b26883709b9ecb9875"} Dec 05 12:39:15 crc kubenswrapper[4728]: I1205 12:39:15.870425 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-wjdd5" podStartSLOduration=3.290271366 
podStartE2EDuration="6.870405668s" podCreationTimestamp="2025-12-05 12:39:09 +0000 UTC" firstStartedPulling="2025-12-05 12:39:11.797461392 +0000 UTC m=+5485.939584085" lastFinishedPulling="2025-12-05 12:39:15.377595694 +0000 UTC m=+5489.519718387" observedRunningTime="2025-12-05 12:39:15.864279614 +0000 UTC m=+5490.006402317" watchObservedRunningTime="2025-12-05 12:39:15.870405668 +0000 UTC m=+5490.012528361" Dec 05 12:39:20 crc kubenswrapper[4728]: I1205 12:39:20.054835 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-wjdd5" Dec 05 12:39:20 crc kubenswrapper[4728]: I1205 12:39:20.055417 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-wjdd5" Dec 05 12:39:21 crc kubenswrapper[4728]: I1205 12:39:21.102429 4728 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-wjdd5" podUID="7381bd29-ef43-445c-8a2c-ac0306ffbc67" containerName="registry-server" probeResult="failure" output=< Dec 05 12:39:21 crc kubenswrapper[4728]: timeout: failed to connect service ":50051" within 1s Dec 05 12:39:21 crc kubenswrapper[4728]: > Dec 05 12:39:21 crc kubenswrapper[4728]: I1205 12:39:21.352192 4728 scope.go:117] "RemoveContainer" containerID="ae8200e36b953d8a7b1f348f59b89f13d9c5c73ea3665ac570f46d3131918f69" Dec 05 12:39:21 crc kubenswrapper[4728]: E1205 12:39:21.352473 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:39:30 crc kubenswrapper[4728]: I1205 12:39:30.034215 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-798767f9fd-kfrfz_24fcf86b-13a3-46c0-bea6-37ef4da29b48/barbican-api/0.log" Dec 05 12:39:30 crc kubenswrapper[4728]: I1205 12:39:30.113288 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-wjdd5" Dec 05 12:39:30 crc kubenswrapper[4728]: I1205 12:39:30.159783 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-798767f9fd-kfrfz_24fcf86b-13a3-46c0-bea6-37ef4da29b48/barbican-api-log/0.log" Dec 05 12:39:30 crc kubenswrapper[4728]: I1205 12:39:30.172667 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-wjdd5" Dec 05 12:39:30 crc kubenswrapper[4728]: I1205 12:39:30.267970 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-76974b5d9d-khzwj_37ac3cc7-ebdb-4ba9-97b6-c62b482b49c2/barbican-keystone-listener/0.log" Dec 05 12:39:30 crc kubenswrapper[4728]: I1205 12:39:30.364297 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-wjdd5"] Dec 05 12:39:30 crc kubenswrapper[4728]: I1205 12:39:30.409444 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-7f87fccb55-qhksr_2b11e743-92a0-4601-8cdf-935c3cc54a55/barbican-worker/0.log" Dec 05 12:39:30 crc kubenswrapper[4728]: I1205 12:39:30.521684 4728 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_barbican-worker-7f87fccb55-qhksr_2b11e743-92a0-4601-8cdf-935c3cc54a55/barbican-worker-log/0.log" Dec 05 12:39:30 crc kubenswrapper[4728]: I1205 12:39:30.808865 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-rjm82_cf81cfab-dfe5-4ebb-87aa-ff462cd3d1f9/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 12:39:31 crc kubenswrapper[4728]: I1205 12:39:31.027125 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_a7d98205-ffa7-4388-8fff-66caf169466f/ceilometer-central-agent/0.log" Dec 05 12:39:31 crc kubenswrapper[4728]: I1205 12:39:31.042774 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_a7d98205-ffa7-4388-8fff-66caf169466f/proxy-httpd/0.log" Dec 05 12:39:31 crc kubenswrapper[4728]: I1205 12:39:31.060887 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_a7d98205-ffa7-4388-8fff-66caf169466f/ceilometer-notification-agent/0.log" Dec 05 12:39:31 crc kubenswrapper[4728]: I1205 12:39:31.124990 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-76974b5d9d-khzwj_37ac3cc7-ebdb-4ba9-97b6-c62b482b49c2/barbican-keystone-listener-log/0.log" Dec 05 12:39:31 crc kubenswrapper[4728]: I1205 12:39:31.247531 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_a7d98205-ffa7-4388-8fff-66caf169466f/sg-core/0.log" Dec 05 12:39:31 crc kubenswrapper[4728]: I1205 12:39:31.362444 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceph_ec650e34-b972-46a5-886c-ba25b07fca9c/ceph/0.log" Dec 05 12:39:31 crc kubenswrapper[4728]: I1205 12:39:31.724528 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_cfc23d81-9123-49a5-b770-4f0b60e01d35/cinder-api-log/0.log" Dec 05 12:39:31 crc kubenswrapper[4728]: I1205 12:39:31.778465 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_cfc23d81-9123-49a5-b770-4f0b60e01d35/cinder-api/0.log" Dec 05 12:39:31 crc kubenswrapper[4728]: I1205 12:39:31.881253 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_75b2a689-5a22-4496-af32-4e93e0b2f3df/probe/0.log" Dec 05 12:39:31 crc kubenswrapper[4728]: I1205 12:39:31.998598 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-wjdd5" podUID="7381bd29-ef43-445c-8a2c-ac0306ffbc67" containerName="registry-server" containerID="cri-o://a829d32a906ec01b8a96d7a678dc8daaca3608cce74e06b26883709b9ecb9875" gracePeriod=2 Dec 05 12:39:32 crc kubenswrapper[4728]: I1205 12:39:32.028046 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_61360ddf-d4ef-4328-add9-ac6c2d95d563/cinder-scheduler/0.log" Dec 05 12:39:32 crc kubenswrapper[4728]: I1205 12:39:32.325524 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_61360ddf-d4ef-4328-add9-ac6c2d95d563/probe/0.log" Dec 05 12:39:32 crc kubenswrapper[4728]: I1205 12:39:32.505872 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_23cf88b0-870f-44f4-9f15-aa4b15d86a12/probe/0.log" Dec 05 12:39:32 crc kubenswrapper[4728]: I1205 12:39:32.750666 4728 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-24vs6_96cca126-d9b0-4c1c-93d8-63872e4a5e1c/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 12:39:33 crc kubenswrapper[4728]: I1205 12:39:33.012921 4728 generic.go:334] "Generic (PLEG): container finished" podID="7381bd29-ef43-445c-8a2c-ac0306ffbc67" containerID="a829d32a906ec01b8a96d7a678dc8daaca3608cce74e06b26883709b9ecb9875" exitCode=0 Dec 05 12:39:33 crc kubenswrapper[4728]: I1205 12:39:33.013189 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wjdd5" event={"ID":"7381bd29-ef43-445c-8a2c-ac0306ffbc67","Type":"ContainerDied","Data":"a829d32a906ec01b8a96d7a678dc8daaca3608cce74e06b26883709b9ecb9875"} Dec 05 12:39:33 crc kubenswrapper[4728]: I1205 12:39:33.053127 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-zsv4q_a4231d17-68db-4e1d-b39d-6d3affe3c6a5/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 12:39:33 crc kubenswrapper[4728]: I1205 12:39:33.227465 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-5d99fc9df9-2cd8j_d49621d5-c052-4869-ace8-926a18cc570d/init/0.log" Dec 05 12:39:33 crc kubenswrapper[4728]: I1205 12:39:33.276331 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_75b2a689-5a22-4496-af32-4e93e0b2f3df/cinder-backup/0.log" Dec 05 12:39:33 crc kubenswrapper[4728]: I1205 12:39:33.506121 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-5d99fc9df9-2cd8j_d49621d5-c052-4869-ace8-926a18cc570d/init/0.log" Dec 05 12:39:33 crc kubenswrapper[4728]: I1205 12:39:33.695091 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-wjdd5" Dec 05 12:39:33 crc kubenswrapper[4728]: I1205 12:39:33.778492 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7381bd29-ef43-445c-8a2c-ac0306ffbc67-utilities\") pod \"7381bd29-ef43-445c-8a2c-ac0306ffbc67\" (UID: \"7381bd29-ef43-445c-8a2c-ac0306ffbc67\") " Dec 05 12:39:33 crc kubenswrapper[4728]: I1205 12:39:33.778564 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7381bd29-ef43-445c-8a2c-ac0306ffbc67-catalog-content\") pod \"7381bd29-ef43-445c-8a2c-ac0306ffbc67\" (UID: \"7381bd29-ef43-445c-8a2c-ac0306ffbc67\") " Dec 05 12:39:33 crc kubenswrapper[4728]: I1205 12:39:33.778739 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nxj5v\" (UniqueName: \"kubernetes.io/projected/7381bd29-ef43-445c-8a2c-ac0306ffbc67-kube-api-access-nxj5v\") pod \"7381bd29-ef43-445c-8a2c-ac0306ffbc67\" (UID: \"7381bd29-ef43-445c-8a2c-ac0306ffbc67\") " Dec 05 12:39:33 crc kubenswrapper[4728]: I1205 12:39:33.784671 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7381bd29-ef43-445c-8a2c-ac0306ffbc67-utilities" (OuterVolumeSpecName: "utilities") pod "7381bd29-ef43-445c-8a2c-ac0306ffbc67" (UID: "7381bd29-ef43-445c-8a2c-ac0306ffbc67"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:39:33 crc kubenswrapper[4728]: I1205 12:39:33.785362 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7381bd29-ef43-445c-8a2c-ac0306ffbc67-kube-api-access-nxj5v" (OuterVolumeSpecName: "kube-api-access-nxj5v") pod "7381bd29-ef43-445c-8a2c-ac0306ffbc67" (UID: "7381bd29-ef43-445c-8a2c-ac0306ffbc67"). InnerVolumeSpecName "kube-api-access-nxj5v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:39:33 crc kubenswrapper[4728]: I1205 12:39:33.833479 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-rn9wc_2b484238-d80c-4274-b0b6-ea03a050e575/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 12:39:33 crc kubenswrapper[4728]: I1205 12:39:33.878638 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-5d99fc9df9-2cd8j_d49621d5-c052-4869-ace8-926a18cc570d/dnsmasq-dns/0.log" Dec 05 12:39:33 crc kubenswrapper[4728]: I1205 12:39:33.881158 4728 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7381bd29-ef43-445c-8a2c-ac0306ffbc67-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 12:39:33 crc kubenswrapper[4728]: I1205 12:39:33.881192 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nxj5v\" (UniqueName: \"kubernetes.io/projected/7381bd29-ef43-445c-8a2c-ac0306ffbc67-kube-api-access-nxj5v\") on node \"crc\" DevicePath \"\"" Dec 05 12:39:33 crc kubenswrapper[4728]: I1205 12:39:33.923223 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7381bd29-ef43-445c-8a2c-ac0306ffbc67-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7381bd29-ef43-445c-8a2c-ac0306ffbc67" (UID: "7381bd29-ef43-445c-8a2c-ac0306ffbc67"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:39:33 crc kubenswrapper[4728]: I1205 12:39:33.942288 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_32cf3773-bc1a-4c62-9b1a-8fc95e42e403/glance-httpd/0.log" Dec 05 12:39:33 crc kubenswrapper[4728]: I1205 12:39:33.983195 4728 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7381bd29-ef43-445c-8a2c-ac0306ffbc67-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 12:39:34 crc kubenswrapper[4728]: I1205 12:39:34.023417 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wjdd5" event={"ID":"7381bd29-ef43-445c-8a2c-ac0306ffbc67","Type":"ContainerDied","Data":"33778accd3967e1f6ec835675545cc6abad656691589aed4ce467a2316585b0b"} Dec 05 12:39:34 crc kubenswrapper[4728]: I1205 12:39:34.023461 4728 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-wjdd5" Dec 05 12:39:34 crc kubenswrapper[4728]: I1205 12:39:34.023472 4728 scope.go:117] "RemoveContainer" containerID="a829d32a906ec01b8a96d7a678dc8daaca3608cce74e06b26883709b9ecb9875" Dec 05 12:39:34 crc kubenswrapper[4728]: I1205 12:39:34.056517 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-wjdd5"] Dec 05 12:39:34 crc kubenswrapper[4728]: I1205 12:39:34.056709 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_32cf3773-bc1a-4c62-9b1a-8fc95e42e403/glance-log/0.log" Dec 05 12:39:34 crc kubenswrapper[4728]: I1205 12:39:34.061024 4728 scope.go:117] "RemoveContainer" containerID="032bd16c6894e355e526dbd6e145f07fb0618b7f5587778d6dd06093c09ff01c" Dec 05 12:39:34 crc kubenswrapper[4728]: I1205 12:39:34.078532 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-wjdd5"] Dec 05 12:39:34 crc kubenswrapper[4728]: I1205 12:39:34.104421 4728 scope.go:117] "RemoveContainer" containerID="b9e6903a9c588266581fd49c80e0b8d6c5caaf577199d079daa1c5c891901a82" Dec 05 12:39:34 crc kubenswrapper[4728]: I1205 12:39:34.218195 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_f030a4aa-1b8c-4889-9385-56c75001c4f5/glance-log/0.log" Dec 05 12:39:34 crc kubenswrapper[4728]: I1205 12:39:34.301501 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_f030a4aa-1b8c-4889-9385-56c75001c4f5/glance-httpd/0.log" Dec 05 12:39:34 crc kubenswrapper[4728]: I1205 12:39:34.352580 4728 scope.go:117] "RemoveContainer" containerID="ae8200e36b953d8a7b1f348f59b89f13d9c5c73ea3665ac570f46d3131918f69" Dec 05 12:39:34 crc kubenswrapper[4728]: E1205 12:39:34.353137 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:39:34 crc kubenswrapper[4728]: I1205 12:39:34.369356 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7381bd29-ef43-445c-8a2c-ac0306ffbc67" path="/var/lib/kubelet/pods/7381bd29-ef43-445c-8a2c-ac0306ffbc67/volumes" Dec 05 12:39:34 crc kubenswrapper[4728]: I1205 12:39:34.562144 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-wscr4_c307593d-70fb-42ac-987a-9e7639f530c6/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 12:39:34 crc kubenswrapper[4728]: I1205 12:39:34.685516 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-7755888bd8-shzsv_841ca27f-0486-413e-975b-4f51b008883a/horizon/0.log" Dec 05 12:39:34 crc kubenswrapper[4728]: I1205 12:39:34.803251 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-5jh58_d2794a25-aa06-4146-957e-5438b4005382/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 12:39:35 crc kubenswrapper[4728]: I1205 12:39:35.202257 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29415601-tk5jd_5ab6c45e-d05b-4ddc-92e0-5addedce425d/keystone-cron/0.log" Dec 
05 12:39:35 crc kubenswrapper[4728]: I1205 12:39:35.309365 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_23cf88b0-870f-44f4-9f15-aa4b15d86a12/cinder-volume/0.log" Dec 05 12:39:35 crc kubenswrapper[4728]: I1205 12:39:35.339935 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-7755888bd8-shzsv_841ca27f-0486-413e-975b-4f51b008883a/horizon-log/0.log" Dec 05 12:39:35 crc kubenswrapper[4728]: I1205 12:39:35.399207 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_910efe4a-03b6-4aa7-aa87-d69b832a3db9/kube-state-metrics/0.log" Dec 05 12:39:35 crc kubenswrapper[4728]: I1205 12:39:35.632061 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-h792n_21cac74f-ba27-4db1-9cbe-6189f230e514/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 12:39:36 crc kubenswrapper[4728]: I1205 12:39:36.070660 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-scheduler-0_30fe7fc4-13df-437a-8771-c6904804bcb9/probe/0.log" Dec 05 12:39:36 crc kubenswrapper[4728]: I1205 12:39:36.300964 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-scheduler-0_30fe7fc4-13df-437a-8771-c6904804bcb9/manila-scheduler/0.log" Dec 05 12:39:36 crc kubenswrapper[4728]: I1205 12:39:36.327868 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-api-0_686f83cd-910d-4bf2-977a-8544326152e4/manila-api/0.log" Dec 05 12:39:36 crc kubenswrapper[4728]: I1205 12:39:36.541909 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-share-share1-0_e76d402c-8c19-4097-8c06-9bb28018f661/probe/0.log" Dec 05 12:39:36 crc kubenswrapper[4728]: I1205 12:39:36.833075 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-api-0_686f83cd-910d-4bf2-977a-8544326152e4/manila-api-log/0.log" Dec 05 12:39:36 crc kubenswrapper[4728]: I1205 12:39:36.934774 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-share-share1-0_e76d402c-8c19-4097-8c06-9bb28018f661/manila-share/0.log" Dec 05 12:39:37 crc kubenswrapper[4728]: I1205 12:39:37.443069 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-clmjj_60e7995e-9ae7-47b3-bd6a-991c444af447/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 12:39:37 crc kubenswrapper[4728]: I1205 12:39:37.779133 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-5cf4cb67d5-pxwtj_9a26b328-f443-4f9a-a2ae-2042e3189096/neutron-httpd/0.log" Dec 05 12:39:38 crc kubenswrapper[4728]: I1205 12:39:38.327001 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-5cf4cb67d5-pxwtj_9a26b328-f443-4f9a-a2ae-2042e3189096/neutron-api/0.log" Dec 05 12:39:39 crc kubenswrapper[4728]: I1205 12:39:39.506496 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_7802cd18-c771-414b-afd5-f6d47c588a58/nova-cell0-conductor-conductor/0.log" Dec 05 12:39:39 crc kubenswrapper[4728]: I1205 12:39:39.567182 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-6fbd7fcb8c-kr5v8_523f920a-f4d7-46db-8066-ad0c4f8d22d5/keystone-api/0.log" Dec 05 12:39:40 crc kubenswrapper[4728]: I1205 12:39:40.021665 4728 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_nova-cell1-conductor-0_50d65ab3-36a4-45da-bfbd-b66ff1541c6b/nova-cell1-conductor-conductor/0.log" Dec 05 12:39:40 crc kubenswrapper[4728]: I1205 12:39:40.328008 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_e7183619-beae-465b-86cf-ccbb710d4ac8/nova-api-log/0.log" Dec 05 12:39:40 crc kubenswrapper[4728]: I1205 12:39:40.404457 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_3c17270c-7319-4bc9-af0b-f008615371f9/nova-cell1-novncproxy-novncproxy/0.log" Dec 05 12:39:40 crc kubenswrapper[4728]: I1205 12:39:40.664430 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-bw26p_1284e61e-761e-482e-930f-ba0e75280dd7/nova-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 12:39:40 crc kubenswrapper[4728]: I1205 12:39:40.783629 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_7a68b1f8-9521-44c8-8a8e-5bc26bc28047/nova-metadata-log/0.log" Dec 05 12:39:41 crc kubenswrapper[4728]: I1205 12:39:41.174722 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_e7183619-beae-465b-86cf-ccbb710d4ac8/nova-api-api/0.log" Dec 05 12:39:41 crc kubenswrapper[4728]: I1205 12:39:41.383586 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_d9cada47-64db-4c9c-8598-917e4099a8a6/mysql-bootstrap/0.log" Dec 05 12:39:41 crc kubenswrapper[4728]: I1205 12:39:41.536787 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_aa4cedf1-fd8d-4339-8569-f105adb2ca1a/nova-scheduler-scheduler/0.log" Dec 05 12:39:41 crc kubenswrapper[4728]: I1205 12:39:41.651735 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_d9cada47-64db-4c9c-8598-917e4099a8a6/mysql-bootstrap/0.log" Dec 05 12:39:41 crc kubenswrapper[4728]: I1205 12:39:41.653044 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_d9cada47-64db-4c9c-8598-917e4099a8a6/galera/0.log" Dec 05 12:39:41 crc kubenswrapper[4728]: I1205 12:39:41.910733 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_08dfc6f9-aba5-4869-bdd3-7e3e33754318/mysql-bootstrap/0.log" Dec 05 12:39:42 crc kubenswrapper[4728]: I1205 12:39:42.143872 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_08dfc6f9-aba5-4869-bdd3-7e3e33754318/galera/0.log" Dec 05 12:39:42 crc kubenswrapper[4728]: I1205 12:39:42.148160 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_08dfc6f9-aba5-4869-bdd3-7e3e33754318/mysql-bootstrap/0.log" Dec 05 12:39:42 crc kubenswrapper[4728]: I1205 12:39:42.328002 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_6860f6fe-8127-4cbd-af2d-7e5e0e4ed001/openstackclient/0.log" Dec 05 12:39:42 crc kubenswrapper[4728]: I1205 12:39:42.469784 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-74pwl_ed80e7c1-b5a1-4606-b110-5d205dd122b4/ovn-controller/0.log" Dec 05 12:39:42 crc kubenswrapper[4728]: I1205 12:39:42.666391 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-5shht_7e189ef3-6dab-4ce4-9cc3-b9bd409342ba/openstack-network-exporter/0.log" Dec 05 12:39:42 crc kubenswrapper[4728]: I1205 12:39:42.853274 4728 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ovn-controller-ovs-csgtz_a087e318-da4d-49e0-826e-198c5afc0a15/ovsdb-server-init/0.log" Dec 05 12:39:43 crc kubenswrapper[4728]: I1205 12:39:43.081855 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_7a68b1f8-9521-44c8-8a8e-5bc26bc28047/nova-metadata-metadata/0.log" Dec 05 12:39:43 crc kubenswrapper[4728]: I1205 12:39:43.085328 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-csgtz_a087e318-da4d-49e0-826e-198c5afc0a15/ovsdb-server-init/0.log" Dec 05 12:39:43 crc kubenswrapper[4728]: I1205 12:39:43.124273 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-csgtz_a087e318-da4d-49e0-826e-198c5afc0a15/ovs-vswitchd/0.log" Dec 05 12:39:43 crc kubenswrapper[4728]: I1205 12:39:43.125870 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-csgtz_a087e318-da4d-49e0-826e-198c5afc0a15/ovsdb-server/0.log" Dec 05 12:39:43 crc kubenswrapper[4728]: I1205 12:39:43.331725 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_de655c8f-ba39-41bb-a5c0-c3195d4999ea/openstack-network-exporter/0.log" Dec 05 12:39:43 crc kubenswrapper[4728]: I1205 12:39:43.376632 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-tszdd_e1b21c39-5973-43f9-a5f5-73f7e3a1f778/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 12:39:43 crc kubenswrapper[4728]: I1205 12:39:43.590920 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_de655c8f-ba39-41bb-a5c0-c3195d4999ea/ovn-northd/0.log" Dec 05 12:39:43 crc kubenswrapper[4728]: I1205 12:39:43.668355 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_de9c5f8c-fb05-44a5-804d-1f8f2129da92/openstack-network-exporter/0.log" Dec 05 12:39:43 crc kubenswrapper[4728]: I1205 12:39:43.728992 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_de9c5f8c-fb05-44a5-804d-1f8f2129da92/ovsdbserver-nb/0.log" Dec 05 12:39:43 crc kubenswrapper[4728]: I1205 12:39:43.926565 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_269bbc54-5980-4de2-ac45-d1d7ff6335e9/openstack-network-exporter/0.log" Dec 05 12:39:43 crc kubenswrapper[4728]: I1205 12:39:43.928079 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_269bbc54-5980-4de2-ac45-d1d7ff6335e9/ovsdbserver-sb/0.log" Dec 05 12:39:44 crc kubenswrapper[4728]: I1205 12:39:44.301474 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_297a0136-c048-4d32-ae75-2691e2bb98b4/setup-container/0.log" Dec 05 12:39:44 crc kubenswrapper[4728]: I1205 12:39:44.512319 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_297a0136-c048-4d32-ae75-2691e2bb98b4/setup-container/0.log" Dec 05 12:39:44 crc kubenswrapper[4728]: I1205 12:39:44.559463 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_297a0136-c048-4d32-ae75-2691e2bb98b4/rabbitmq/0.log" Dec 05 12:39:44 crc kubenswrapper[4728]: I1205 12:39:44.634436 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-554ccc7b5b-l2c6v_d7d32022-fd6b-4ecd-83d4-5b628f19e413/placement-api/0.log" Dec 05 12:39:44 crc kubenswrapper[4728]: I1205 12:39:44.772175 4728 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_placement-554ccc7b5b-l2c6v_d7d32022-fd6b-4ecd-83d4-5b628f19e413/placement-log/0.log" Dec 05 12:39:44 crc kubenswrapper[4728]: I1205 12:39:44.802402 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_9f981b7a-7de1-4ce3-ae26-5693c659923d/setup-container/0.log" Dec 05 12:39:45 crc kubenswrapper[4728]: I1205 12:39:45.050704 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_9f981b7a-7de1-4ce3-ae26-5693c659923d/setup-container/0.log" Dec 05 12:39:45 crc kubenswrapper[4728]: I1205 12:39:45.104808 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-pj5b7_be7ac6c7-643c-42b4-bae5-0eab2ee3aea0/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 12:39:45 crc kubenswrapper[4728]: I1205 12:39:45.114392 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_9f981b7a-7de1-4ce3-ae26-5693c659923d/rabbitmq/0.log" Dec 05 12:39:45 crc kubenswrapper[4728]: I1205 12:39:45.333289 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-8bbzt_12c3ce02-598e-48b2-b81c-7f80d3589de4/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 12:39:45 crc kubenswrapper[4728]: I1205 12:39:45.345305 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-m6vb7_a97a24e6-7ec7-48ea-8dcb-bc6c72a64f67/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 12:39:45 crc kubenswrapper[4728]: I1205 12:39:45.546550 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-7jz8f_45fff4a4-1d89-41c0-a166-935f921ad8ec/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 12:39:45 crc kubenswrapper[4728]: I1205 12:39:45.736743 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-p6mdl_63543ad1-6aa4-4b72-aa6d-4438fad98d08/ssh-known-hosts-edpm-deployment/0.log" Dec 05 12:39:45 crc kubenswrapper[4728]: I1205 12:39:45.933972 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-6bf4577867-xpjkr_9e762b75-33c7-464f-a8a9-316b5209b2b3/proxy-server/0.log" Dec 05 12:39:46 crc kubenswrapper[4728]: I1205 12:39:46.028971 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-mwr89_f047ba61-512e-4899-95ec-2dd4a1862858/swift-ring-rebalance/0.log" Dec 05 12:39:46 crc kubenswrapper[4728]: I1205 12:39:46.090299 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-6bf4577867-xpjkr_9e762b75-33c7-464f-a8a9-316b5209b2b3/proxy-httpd/0.log" Dec 05 12:39:46 crc kubenswrapper[4728]: I1205 12:39:46.178336 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a64af6ac-e922-435f-bee9-1cc7e7a95f4a/account-auditor/0.log" Dec 05 12:39:46 crc kubenswrapper[4728]: I1205 12:39:46.269679 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a64af6ac-e922-435f-bee9-1cc7e7a95f4a/account-reaper/0.log" Dec 05 12:39:46 crc kubenswrapper[4728]: I1205 12:39:46.363851 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a64af6ac-e922-435f-bee9-1cc7e7a95f4a/account-replicator/0.log" Dec 05 12:39:46 crc kubenswrapper[4728]: I1205 12:39:46.379683 4728 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-storage-0_a64af6ac-e922-435f-bee9-1cc7e7a95f4a/account-server/0.log" Dec 05 12:39:46 crc kubenswrapper[4728]: I1205 12:39:46.447282 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a64af6ac-e922-435f-bee9-1cc7e7a95f4a/container-auditor/0.log" Dec 05 12:39:46 crc kubenswrapper[4728]: I1205 12:39:46.557498 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a64af6ac-e922-435f-bee9-1cc7e7a95f4a/container-replicator/0.log" Dec 05 12:39:46 crc kubenswrapper[4728]: I1205 12:39:46.581113 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a64af6ac-e922-435f-bee9-1cc7e7a95f4a/container-server/0.log" Dec 05 12:39:46 crc kubenswrapper[4728]: I1205 12:39:46.650288 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a64af6ac-e922-435f-bee9-1cc7e7a95f4a/container-updater/0.log" Dec 05 12:39:46 crc kubenswrapper[4728]: I1205 12:39:46.669738 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a64af6ac-e922-435f-bee9-1cc7e7a95f4a/object-auditor/0.log" Dec 05 12:39:46 crc kubenswrapper[4728]: I1205 12:39:46.815815 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a64af6ac-e922-435f-bee9-1cc7e7a95f4a/object-expirer/0.log" Dec 05 12:39:46 crc kubenswrapper[4728]: I1205 12:39:46.853637 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a64af6ac-e922-435f-bee9-1cc7e7a95f4a/object-replicator/0.log" Dec 05 12:39:46 crc kubenswrapper[4728]: I1205 12:39:46.906438 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a64af6ac-e922-435f-bee9-1cc7e7a95f4a/object-updater/0.log" Dec 05 12:39:46 crc kubenswrapper[4728]: I1205 12:39:46.915424 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a64af6ac-e922-435f-bee9-1cc7e7a95f4a/object-server/0.log" Dec 05 12:39:47 crc kubenswrapper[4728]: I1205 12:39:47.099738 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a64af6ac-e922-435f-bee9-1cc7e7a95f4a/rsync/0.log" Dec 05 12:39:47 crc kubenswrapper[4728]: I1205 12:39:47.237760 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-mb8fg_08a82141-a6d5-4c68-9adb-9c4158a6c7c2/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 12:39:47 crc kubenswrapper[4728]: I1205 12:39:47.240031 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a64af6ac-e922-435f-bee9-1cc7e7a95f4a/swift-recon-cron/0.log" Dec 05 12:39:47 crc kubenswrapper[4728]: I1205 12:39:47.352000 4728 scope.go:117] "RemoveContainer" containerID="ae8200e36b953d8a7b1f348f59b89f13d9c5c73ea3665ac570f46d3131918f69" Dec 05 12:39:47 crc kubenswrapper[4728]: E1205 12:39:47.352350 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:39:47 crc kubenswrapper[4728]: I1205 12:39:47.469493 4728 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_cbc422a5-ef18-4b9d-a3a4-c783d200dc25/test-operator-logs-container/0.log" Dec 05 12:39:47 crc kubenswrapper[4728]: I1205 12:39:47.587436 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_b71aa6bd-22ea-4144-84ea-a241546286a2/tempest-tests-tempest-tests-runner/0.log" Dec 05 12:39:47 crc kubenswrapper[4728]: I1205 12:39:47.687936 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-w2kxm_8ea93fe3-4992-4d62-b2c2-f67ca4763c75/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 12:39:59 crc kubenswrapper[4728]: I1205 12:39:59.352618 4728 scope.go:117] "RemoveContainer" containerID="ae8200e36b953d8a7b1f348f59b89f13d9c5c73ea3665ac570f46d3131918f69" Dec 05 12:39:59 crc kubenswrapper[4728]: E1205 12:39:59.353398 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:40:01 crc kubenswrapper[4728]: I1205 12:40:01.962206 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_efbf5688-8330-4166-a93b-03dcf8ed578d/memcached/0.log" Dec 05 12:40:11 crc kubenswrapper[4728]: I1205 12:40:11.351467 4728 scope.go:117] "RemoveContainer" containerID="ae8200e36b953d8a7b1f348f59b89f13d9c5c73ea3665ac570f46d3131918f69" Dec 05 12:40:11 crc kubenswrapper[4728]: E1205 12:40:11.352176 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:40:16 crc kubenswrapper[4728]: I1205 12:40:16.116536 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-btnkh_5d689dc0-c7c8-4af2-8f4c-45863ab88b69/manager/0.log" Dec 05 12:40:16 crc kubenswrapper[4728]: I1205 12:40:16.117246 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-btnkh_5d689dc0-c7c8-4af2-8f4c-45863ab88b69/kube-rbac-proxy/0.log" Dec 05 12:40:16 crc kubenswrapper[4728]: I1205 12:40:16.337465 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-zqc5j_941ddd04-049e-4247-98c2-6ef2117c2c69/kube-rbac-proxy/0.log" Dec 05 12:40:16 crc kubenswrapper[4728]: I1205 12:40:16.380496 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-zqc5j_941ddd04-049e-4247-98c2-6ef2117c2c69/manager/0.log" Dec 05 12:40:16 crc kubenswrapper[4728]: I1205 12:40:16.496532 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-dnlfm_df0f8091-3107-4a49-9672-8332e4c1f8c0/kube-rbac-proxy/0.log" Dec 05 12:40:16 crc 
kubenswrapper[4728]: I1205 12:40:16.538397 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-dnlfm_df0f8091-3107-4a49-9672-8332e4c1f8c0/manager/0.log" Dec 05 12:40:16 crc kubenswrapper[4728]: I1205 12:40:16.643764 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e48d19870fbb270f90e07dce4d9bad603b7d4900857291efd849a97a2e5tx2p_e0dc12b8-8d15-4ef7-a8f9-985442001a82/util/0.log" Dec 05 12:40:16 crc kubenswrapper[4728]: I1205 12:40:16.846870 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e48d19870fbb270f90e07dce4d9bad603b7d4900857291efd849a97a2e5tx2p_e0dc12b8-8d15-4ef7-a8f9-985442001a82/pull/0.log" Dec 05 12:40:16 crc kubenswrapper[4728]: I1205 12:40:16.863212 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e48d19870fbb270f90e07dce4d9bad603b7d4900857291efd849a97a2e5tx2p_e0dc12b8-8d15-4ef7-a8f9-985442001a82/util/0.log" Dec 05 12:40:16 crc kubenswrapper[4728]: I1205 12:40:16.865269 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e48d19870fbb270f90e07dce4d9bad603b7d4900857291efd849a97a2e5tx2p_e0dc12b8-8d15-4ef7-a8f9-985442001a82/pull/0.log" Dec 05 12:40:17 crc kubenswrapper[4728]: I1205 12:40:17.057621 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e48d19870fbb270f90e07dce4d9bad603b7d4900857291efd849a97a2e5tx2p_e0dc12b8-8d15-4ef7-a8f9-985442001a82/util/0.log" Dec 05 12:40:17 crc kubenswrapper[4728]: I1205 12:40:17.068153 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e48d19870fbb270f90e07dce4d9bad603b7d4900857291efd849a97a2e5tx2p_e0dc12b8-8d15-4ef7-a8f9-985442001a82/pull/0.log" Dec 05 12:40:17 crc kubenswrapper[4728]: I1205 12:40:17.074038 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e48d19870fbb270f90e07dce4d9bad603b7d4900857291efd849a97a2e5tx2p_e0dc12b8-8d15-4ef7-a8f9-985442001a82/extract/0.log" Dec 05 12:40:17 crc kubenswrapper[4728]: I1205 12:40:17.245031 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-nch5j_3149306c-d64a-4bdf-994a-ecec0489e472/kube-rbac-proxy/0.log" Dec 05 12:40:17 crc kubenswrapper[4728]: I1205 12:40:17.327409 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-nch5j_3149306c-d64a-4bdf-994a-ecec0489e472/manager/0.log" Dec 05 12:40:17 crc kubenswrapper[4728]: I1205 12:40:17.344150 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-ghbcj_403718e0-87fa-402a-844e-6b458a15b003/kube-rbac-proxy/0.log" Dec 05 12:40:17 crc kubenswrapper[4728]: I1205 12:40:17.503930 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-ghbcj_403718e0-87fa-402a-844e-6b458a15b003/manager/0.log" Dec 05 12:40:17 crc kubenswrapper[4728]: I1205 12:40:17.589549 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-2zvrw_34f5a6c5-a316-450d-83a1-affbdd4d2e0e/manager/0.log" Dec 05 12:40:17 crc kubenswrapper[4728]: I1205 12:40:17.637477 4728 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-2zvrw_34f5a6c5-a316-450d-83a1-affbdd4d2e0e/kube-rbac-proxy/0.log" Dec 05 12:40:17 crc kubenswrapper[4728]: I1205 12:40:17.763223 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-c2s6r_a1c012ce-e23c-4235-b2b2-56306e3d4722/kube-rbac-proxy/0.log" Dec 05 12:40:17 crc kubenswrapper[4728]: I1205 12:40:17.954023 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-7jg64_03a7d3e9-4e85-496e-963f-f0c1e7e4cf04/kube-rbac-proxy/0.log" Dec 05 12:40:17 crc kubenswrapper[4728]: I1205 12:40:17.973715 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-c2s6r_a1c012ce-e23c-4235-b2b2-56306e3d4722/manager/0.log" Dec 05 12:40:18 crc kubenswrapper[4728]: I1205 12:40:18.017008 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-7jg64_03a7d3e9-4e85-496e-963f-f0c1e7e4cf04/manager/0.log" Dec 05 12:40:18 crc kubenswrapper[4728]: I1205 12:40:18.173715 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-6wcpq_6f5ec4c9-95e8-43ea-a137-9c781e4f234f/kube-rbac-proxy/0.log" Dec 05 12:40:18 crc kubenswrapper[4728]: I1205 12:40:18.261829 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-6wcpq_6f5ec4c9-95e8-43ea-a137-9c781e4f234f/manager/0.log" Dec 05 12:40:18 crc kubenswrapper[4728]: I1205 12:40:18.355649 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-n8x6r_d21af02e-d731-402f-aa09-1f705dc4e82b/kube-rbac-proxy/0.log" Dec 05 12:40:18 crc kubenswrapper[4728]: I1205 12:40:18.451676 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-n8x6r_d21af02e-d731-402f-aa09-1f705dc4e82b/manager/0.log" Dec 05 12:40:18 crc kubenswrapper[4728]: I1205 12:40:18.537183 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-xjwhb_ce9ea80b-ff4e-49a8-a1b6-7f7ea21dbf10/kube-rbac-proxy/0.log" Dec 05 12:40:18 crc kubenswrapper[4728]: I1205 12:40:18.587650 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-xjwhb_ce9ea80b-ff4e-49a8-a1b6-7f7ea21dbf10/manager/0.log" Dec 05 12:40:18 crc kubenswrapper[4728]: I1205 12:40:18.740711 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-sdqtl_98cd0df5-f4a5-4515-80b5-d0ac625a527a/kube-rbac-proxy/0.log" Dec 05 12:40:18 crc kubenswrapper[4728]: I1205 12:40:18.798490 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-sdqtl_98cd0df5-f4a5-4515-80b5-d0ac625a527a/manager/0.log" Dec 05 12:40:18 crc kubenswrapper[4728]: I1205 12:40:18.947681 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-8fvcf_45a6cdaf-b12d-4f86-b6ce-7761cfd5aee6/kube-rbac-proxy/0.log" Dec 05 12:40:19 crc kubenswrapper[4728]: I1205 12:40:19.025438 4728 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-8fvcf_45a6cdaf-b12d-4f86-b6ce-7761cfd5aee6/manager/0.log" Dec 05 12:40:19 crc kubenswrapper[4728]: I1205 12:40:19.107924 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-t7lpc_854e2a55-450f-48e7-93fb-fca327f4fd18/kube-rbac-proxy/0.log" Dec 05 12:40:19 crc kubenswrapper[4728]: I1205 12:40:19.178805 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-t7lpc_854e2a55-450f-48e7-93fb-fca327f4fd18/manager/0.log" Dec 05 12:40:19 crc kubenswrapper[4728]: I1205 12:40:19.292677 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4wlcvm_6ee43140-9d2f-42c8-917f-eaa028a8e1b1/kube-rbac-proxy/0.log" Dec 05 12:40:19 crc kubenswrapper[4728]: I1205 12:40:19.425489 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4wlcvm_6ee43140-9d2f-42c8-917f-eaa028a8e1b1/manager/0.log" Dec 05 12:40:19 crc kubenswrapper[4728]: I1205 12:40:19.771180 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-lvhsr_bcbc2f72-6b48-4afd-9b1a-cd9d4a32692d/registry-server/0.log" Dec 05 12:40:19 crc kubenswrapper[4728]: I1205 12:40:19.851945 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-6767b55986-t74w7_226ccaf4-1c8b-4a98-a3a6-122629462baa/operator/0.log" Dec 05 12:40:20 crc kubenswrapper[4728]: I1205 12:40:20.049944 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-pqmth_b5925d20-e10a-4564-91f4-67acb55b2a01/kube-rbac-proxy/0.log" Dec 05 12:40:20 crc kubenswrapper[4728]: I1205 12:40:20.179748 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-pqmth_b5925d20-e10a-4564-91f4-67acb55b2a01/manager/0.log" Dec 05 12:40:20 crc kubenswrapper[4728]: I1205 12:40:20.247536 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-xnq92_dd2fed26-0e5c-49e0-ad15-3936a13680e7/kube-rbac-proxy/0.log" Dec 05 12:40:20 crc kubenswrapper[4728]: I1205 12:40:20.297458 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-xnq92_dd2fed26-0e5c-49e0-ad15-3936a13680e7/manager/0.log" Dec 05 12:40:20 crc kubenswrapper[4728]: I1205 12:40:20.483098 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-sb7lq_04a349f4-b388-4a9c-8dbc-54bd1fb46934/operator/0.log" Dec 05 12:40:20 crc kubenswrapper[4728]: I1205 12:40:20.567494 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-h6czw_db8744a4-edde-4a54-85e9-05089f650ba0/kube-rbac-proxy/0.log" Dec 05 12:40:20 crc kubenswrapper[4728]: I1205 12:40:20.700372 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-h6czw_db8744a4-edde-4a54-85e9-05089f650ba0/manager/0.log" Dec 05 12:40:20 crc kubenswrapper[4728]: I1205 12:40:20.763053 4728 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-64b69b8785-cvs4m_891e8e93-da9a-4b87-8e69-04fe149274cd/manager/0.log" Dec 05 12:40:20 crc kubenswrapper[4728]: I1205 12:40:20.842126 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-gbdj9_c16435ec-544a-4d19-8667-925c045ecf61/kube-rbac-proxy/0.log" Dec 05 12:40:20 crc kubenswrapper[4728]: I1205 12:40:20.847517 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-gbdj9_c16435ec-544a-4d19-8667-925c045ecf61/manager/0.log" Dec 05 12:40:20 crc kubenswrapper[4728]: I1205 12:40:20.929101 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-sq989_5d7b3b1e-b8a3-4477-8d37-cfd6db1cf27d/kube-rbac-proxy/0.log" Dec 05 12:40:20 crc kubenswrapper[4728]: I1205 12:40:20.987679 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-sq989_5d7b3b1e-b8a3-4477-8d37-cfd6db1cf27d/manager/0.log" Dec 05 12:40:21 crc kubenswrapper[4728]: I1205 12:40:21.022554 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-tc4ks_a5b101e4-a4f7-4c73-8327-e09cce07eb51/kube-rbac-proxy/0.log" Dec 05 12:40:21 crc kubenswrapper[4728]: I1205 12:40:21.119680 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-tc4ks_a5b101e4-a4f7-4c73-8327-e09cce07eb51/manager/0.log" Dec 05 12:40:26 crc kubenswrapper[4728]: I1205 12:40:26.358984 4728 scope.go:117] "RemoveContainer" containerID="ae8200e36b953d8a7b1f348f59b89f13d9c5c73ea3665ac570f46d3131918f69" Dec 05 12:40:27 crc kubenswrapper[4728]: I1205 12:40:27.518670 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" event={"ID":"95bfa60b-fcb6-4519-abc5-c25fea50921d","Type":"ContainerStarted","Data":"21c4725e91db14016beb508c555e5a7a654dd3ce139344fe17ac8e6fa2ec20ce"} Dec 05 12:40:38 crc kubenswrapper[4728]: I1205 12:40:38.981532 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-5x4xq_99f17d34-cfff-4706-af23-04fff3d500bd/control-plane-machine-set-operator/0.log" Dec 05 12:40:39 crc kubenswrapper[4728]: I1205 12:40:39.176461 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-vllpv_f4d671a2-6454-4bf6-a099-0c0e15de2f20/machine-api-operator/0.log" Dec 05 12:40:39 crc kubenswrapper[4728]: I1205 12:40:39.176555 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-vllpv_f4d671a2-6454-4bf6-a099-0c0e15de2f20/kube-rbac-proxy/0.log" Dec 05 12:40:51 crc kubenswrapper[4728]: I1205 12:40:51.939610 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-hhc56_159ff938-2eac-4774-beeb-18122124ceef/cert-manager-controller/0.log" Dec 05 12:40:52 crc kubenswrapper[4728]: I1205 12:40:52.311176 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-zxdgf_21cf202a-ede4-4ba9-9180-2dcde628cd09/cert-manager-cainjector/0.log" Dec 05 12:40:52 crc kubenswrapper[4728]: I1205 
12:40:52.359557 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-fb8dg_2cf07d8e-ad99-47a1-b4b7-0b37e78f81f0/cert-manager-webhook/0.log" Dec 05 12:41:05 crc kubenswrapper[4728]: I1205 12:41:05.313017 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-gxx2h_f97e7e26-99e0-403f-a6d5-5aa008101459/nmstate-console-plugin/0.log" Dec 05 12:41:05 crc kubenswrapper[4728]: I1205 12:41:05.423122 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-x9g4q_522d4b95-dda7-40b8-960e-f19f1b147c41/nmstate-handler/0.log" Dec 05 12:41:05 crc kubenswrapper[4728]: I1205 12:41:05.510326 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-6jmjp_c560c0c5-c5bd-41b6-a77c-b8ff9452b7e6/kube-rbac-proxy/0.log" Dec 05 12:41:05 crc kubenswrapper[4728]: I1205 12:41:05.519561 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-6jmjp_c560c0c5-c5bd-41b6-a77c-b8ff9452b7e6/nmstate-metrics/0.log" Dec 05 12:41:05 crc kubenswrapper[4728]: I1205 12:41:05.669724 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-5rg82_2ce6b79a-c293-472b-90f8-7b56ce77b4cf/nmstate-operator/0.log" Dec 05 12:41:05 crc kubenswrapper[4728]: I1205 12:41:05.738536 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-7xz5t_7c2cda78-4bb1-416c-8762-8c1618a755ad/nmstate-webhook/0.log" Dec 05 12:41:20 crc kubenswrapper[4728]: I1205 12:41:20.350428 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-2frdd_09963a34-a1db-4854-8a6a-475da8222a7b/kube-rbac-proxy/0.log" Dec 05 12:41:20 crc kubenswrapper[4728]: I1205 12:41:20.547626 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-2frdd_09963a34-a1db-4854-8a6a-475da8222a7b/controller/0.log" Dec 05 12:41:20 crc kubenswrapper[4728]: I1205 12:41:20.582662 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fpsrk_cc6442af-701e-429c-9fe5-93dbe8884f45/cp-frr-files/0.log" Dec 05 12:41:20 crc kubenswrapper[4728]: I1205 12:41:20.834387 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fpsrk_cc6442af-701e-429c-9fe5-93dbe8884f45/cp-reloader/0.log" Dec 05 12:41:20 crc kubenswrapper[4728]: I1205 12:41:20.854234 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fpsrk_cc6442af-701e-429c-9fe5-93dbe8884f45/cp-frr-files/0.log" Dec 05 12:41:20 crc kubenswrapper[4728]: I1205 12:41:20.854429 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fpsrk_cc6442af-701e-429c-9fe5-93dbe8884f45/cp-reloader/0.log" Dec 05 12:41:20 crc kubenswrapper[4728]: I1205 12:41:20.860727 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fpsrk_cc6442af-701e-429c-9fe5-93dbe8884f45/cp-metrics/0.log" Dec 05 12:41:21 crc kubenswrapper[4728]: I1205 12:41:21.039626 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fpsrk_cc6442af-701e-429c-9fe5-93dbe8884f45/cp-frr-files/0.log" Dec 05 12:41:21 crc kubenswrapper[4728]: I1205 12:41:21.072861 4728 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-fpsrk_cc6442af-701e-429c-9fe5-93dbe8884f45/cp-reloader/0.log" Dec 05 12:41:21 crc kubenswrapper[4728]: I1205 12:41:21.075006 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fpsrk_cc6442af-701e-429c-9fe5-93dbe8884f45/cp-metrics/0.log" Dec 05 12:41:21 crc kubenswrapper[4728]: I1205 12:41:21.137708 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fpsrk_cc6442af-701e-429c-9fe5-93dbe8884f45/cp-metrics/0.log" Dec 05 12:41:21 crc kubenswrapper[4728]: I1205 12:41:21.305088 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fpsrk_cc6442af-701e-429c-9fe5-93dbe8884f45/cp-metrics/0.log" Dec 05 12:41:21 crc kubenswrapper[4728]: I1205 12:41:21.306092 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fpsrk_cc6442af-701e-429c-9fe5-93dbe8884f45/cp-reloader/0.log" Dec 05 12:41:21 crc kubenswrapper[4728]: I1205 12:41:21.319250 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fpsrk_cc6442af-701e-429c-9fe5-93dbe8884f45/cp-frr-files/0.log" Dec 05 12:41:21 crc kubenswrapper[4728]: I1205 12:41:21.356121 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fpsrk_cc6442af-701e-429c-9fe5-93dbe8884f45/controller/0.log" Dec 05 12:41:21 crc kubenswrapper[4728]: I1205 12:41:21.527110 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fpsrk_cc6442af-701e-429c-9fe5-93dbe8884f45/frr-metrics/0.log" Dec 05 12:41:21 crc kubenswrapper[4728]: I1205 12:41:21.559842 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fpsrk_cc6442af-701e-429c-9fe5-93dbe8884f45/kube-rbac-proxy/0.log" Dec 05 12:41:21 crc kubenswrapper[4728]: I1205 12:41:21.636004 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fpsrk_cc6442af-701e-429c-9fe5-93dbe8884f45/kube-rbac-proxy-frr/0.log" Dec 05 12:41:21 crc kubenswrapper[4728]: I1205 12:41:21.731509 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fpsrk_cc6442af-701e-429c-9fe5-93dbe8884f45/reloader/0.log" Dec 05 12:41:21 crc kubenswrapper[4728]: I1205 12:41:21.945310 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-9m6g5_b85fcb5b-5696-42c4-bb19-1e0d5fa8ff06/frr-k8s-webhook-server/0.log" Dec 05 12:41:22 crc kubenswrapper[4728]: I1205 12:41:22.018167 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-dc46c65cb-kfn26_1f9a485b-4186-4184-9f2a-81a4b74105d9/manager/0.log" Dec 05 12:41:22 crc kubenswrapper[4728]: I1205 12:41:22.292528 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-585ddd7f97-8nw7b_e085b90a-7d0a-4027-bf16-477076627681/webhook-server/0.log" Dec 05 12:41:22 crc kubenswrapper[4728]: I1205 12:41:22.488680 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-7wkzn_ee5be811-5e9e-4a19-955b-944a9a457060/kube-rbac-proxy/0.log" Dec 05 12:41:23 crc kubenswrapper[4728]: I1205 12:41:23.015110 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-7wkzn_ee5be811-5e9e-4a19-955b-944a9a457060/speaker/0.log" Dec 05 12:41:23 crc kubenswrapper[4728]: I1205 12:41:23.247893 4728 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-fpsrk_cc6442af-701e-429c-9fe5-93dbe8884f45/frr/0.log" Dec 05 12:41:35 crc kubenswrapper[4728]: I1205 12:41:35.006297 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7x7pw_5ce9b03b-d5f8-4249-80c3-8e637c09bf8e/util/0.log" Dec 05 12:41:35 crc kubenswrapper[4728]: I1205 12:41:35.209032 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7x7pw_5ce9b03b-d5f8-4249-80c3-8e637c09bf8e/pull/0.log" Dec 05 12:41:35 crc kubenswrapper[4728]: I1205 12:41:35.224586 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7x7pw_5ce9b03b-d5f8-4249-80c3-8e637c09bf8e/pull/0.log" Dec 05 12:41:35 crc kubenswrapper[4728]: I1205 12:41:35.242515 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7x7pw_5ce9b03b-d5f8-4249-80c3-8e637c09bf8e/util/0.log" Dec 05 12:41:35 crc kubenswrapper[4728]: I1205 12:41:35.421322 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7x7pw_5ce9b03b-d5f8-4249-80c3-8e637c09bf8e/util/0.log" Dec 05 12:41:35 crc kubenswrapper[4728]: I1205 12:41:35.428855 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7x7pw_5ce9b03b-d5f8-4249-80c3-8e637c09bf8e/pull/0.log" Dec 05 12:41:35 crc kubenswrapper[4728]: I1205 12:41:35.458555 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7x7pw_5ce9b03b-d5f8-4249-80c3-8e637c09bf8e/extract/0.log" Dec 05 12:41:35 crc kubenswrapper[4728]: I1205 12:41:35.584000 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83hnsxg_dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f/util/0.log" Dec 05 12:41:35 crc kubenswrapper[4728]: I1205 12:41:35.810759 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83hnsxg_dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f/pull/0.log" Dec 05 12:41:35 crc kubenswrapper[4728]: I1205 12:41:35.818933 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83hnsxg_dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f/util/0.log" Dec 05 12:41:35 crc kubenswrapper[4728]: I1205 12:41:35.824639 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83hnsxg_dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f/pull/0.log" Dec 05 12:41:35 crc kubenswrapper[4728]: I1205 12:41:35.969921 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83hnsxg_dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f/util/0.log" Dec 05 12:41:35 crc kubenswrapper[4728]: I1205 12:41:35.970869 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83hnsxg_dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f/pull/0.log" Dec 05 12:41:36 crc kubenswrapper[4728]: I1205 
12:41:36.004242 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83hnsxg_dbd9c8ae-586e-4200-bc76-b9bd8bb33b0f/extract/0.log" Dec 05 12:41:36 crc kubenswrapper[4728]: I1205 12:41:36.120725 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-tvtdk_ad6f43a7-1af1-48d1-802f-a2c36bab80cd/extract-utilities/0.log" Dec 05 12:41:36 crc kubenswrapper[4728]: I1205 12:41:36.311073 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-tvtdk_ad6f43a7-1af1-48d1-802f-a2c36bab80cd/extract-content/0.log" Dec 05 12:41:36 crc kubenswrapper[4728]: I1205 12:41:36.318254 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-tvtdk_ad6f43a7-1af1-48d1-802f-a2c36bab80cd/extract-utilities/0.log" Dec 05 12:41:36 crc kubenswrapper[4728]: I1205 12:41:36.318261 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-tvtdk_ad6f43a7-1af1-48d1-802f-a2c36bab80cd/extract-content/0.log" Dec 05 12:41:36 crc kubenswrapper[4728]: I1205 12:41:36.505678 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-tvtdk_ad6f43a7-1af1-48d1-802f-a2c36bab80cd/extract-content/0.log" Dec 05 12:41:36 crc kubenswrapper[4728]: I1205 12:41:36.523192 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-tvtdk_ad6f43a7-1af1-48d1-802f-a2c36bab80cd/extract-utilities/0.log" Dec 05 12:41:36 crc kubenswrapper[4728]: I1205 12:41:36.772534 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-s5vrv_f5e6b28c-ad1c-4ae0-a757-ab7cd65b94e7/extract-utilities/0.log" Dec 05 12:41:36 crc kubenswrapper[4728]: I1205 12:41:36.965274 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-s5vrv_f5e6b28c-ad1c-4ae0-a757-ab7cd65b94e7/extract-content/0.log" Dec 05 12:41:36 crc kubenswrapper[4728]: I1205 12:41:36.996743 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-s5vrv_f5e6b28c-ad1c-4ae0-a757-ab7cd65b94e7/extract-utilities/0.log" Dec 05 12:41:37 crc kubenswrapper[4728]: I1205 12:41:37.027522 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-s5vrv_f5e6b28c-ad1c-4ae0-a757-ab7cd65b94e7/extract-content/0.log" Dec 05 12:41:37 crc kubenswrapper[4728]: I1205 12:41:37.246734 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-tvtdk_ad6f43a7-1af1-48d1-802f-a2c36bab80cd/registry-server/0.log" Dec 05 12:41:37 crc kubenswrapper[4728]: I1205 12:41:37.262808 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-s5vrv_f5e6b28c-ad1c-4ae0-a757-ab7cd65b94e7/extract-content/0.log" Dec 05 12:41:37 crc kubenswrapper[4728]: I1205 12:41:37.268200 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-s5vrv_f5e6b28c-ad1c-4ae0-a757-ab7cd65b94e7/extract-utilities/0.log" Dec 05 12:41:37 crc kubenswrapper[4728]: I1205 12:41:37.562845 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-nxkrc_416c984c-f4c2-4b3f-8dd6-c27724ac7c42/marketplace-operator/0.log" Dec 05 12:41:37 
crc kubenswrapper[4728]: I1205 12:41:37.724102 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-9sklm_4f2ed4d0-156e-4dca-ad81-dc56dbb3b8c2/extract-utilities/0.log"
Dec 05 12:41:37 crc kubenswrapper[4728]: I1205 12:41:37.987167 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-9sklm_4f2ed4d0-156e-4dca-ad81-dc56dbb3b8c2/extract-content/0.log"
Dec 05 12:41:37 crc kubenswrapper[4728]: I1205 12:41:37.989009 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-9sklm_4f2ed4d0-156e-4dca-ad81-dc56dbb3b8c2/extract-content/0.log"
Dec 05 12:41:37 crc kubenswrapper[4728]: I1205 12:41:37.995601 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-9sklm_4f2ed4d0-156e-4dca-ad81-dc56dbb3b8c2/extract-utilities/0.log"
Dec 05 12:41:38 crc kubenswrapper[4728]: I1205 12:41:38.062876 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-s5vrv_f5e6b28c-ad1c-4ae0-a757-ab7cd65b94e7/registry-server/0.log"
Dec 05 12:41:38 crc kubenswrapper[4728]: I1205 12:41:38.199876 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-9sklm_4f2ed4d0-156e-4dca-ad81-dc56dbb3b8c2/extract-utilities/0.log"
Dec 05 12:41:38 crc kubenswrapper[4728]: I1205 12:41:38.240081 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-9sklm_4f2ed4d0-156e-4dca-ad81-dc56dbb3b8c2/extract-content/0.log"
Dec 05 12:41:38 crc kubenswrapper[4728]: I1205 12:41:38.336515 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-9sklm_4f2ed4d0-156e-4dca-ad81-dc56dbb3b8c2/registry-server/0.log"
Dec 05 12:41:38 crc kubenswrapper[4728]: I1205 12:41:38.380925 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-hvhhm_bf6fb737-e78f-496c-9ae3-5067c4300f62/extract-utilities/0.log"
Dec 05 12:41:38 crc kubenswrapper[4728]: I1205 12:41:38.595891 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-hvhhm_bf6fb737-e78f-496c-9ae3-5067c4300f62/extract-content/0.log"
Dec 05 12:41:38 crc kubenswrapper[4728]: I1205 12:41:38.600604 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-hvhhm_bf6fb737-e78f-496c-9ae3-5067c4300f62/extract-utilities/0.log"
Dec 05 12:41:38 crc kubenswrapper[4728]: I1205 12:41:38.625059 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-hvhhm_bf6fb737-e78f-496c-9ae3-5067c4300f62/extract-content/0.log"
Dec 05 12:41:38 crc kubenswrapper[4728]: I1205 12:41:38.767878 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-hvhhm_bf6fb737-e78f-496c-9ae3-5067c4300f62/extract-utilities/0.log"
Dec 05 12:41:38 crc kubenswrapper[4728]: I1205 12:41:38.790024 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-hvhhm_bf6fb737-e78f-496c-9ae3-5067c4300f62/extract-content/0.log"
Dec 05 12:41:39 crc kubenswrapper[4728]: I1205 12:41:39.552112 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-hvhhm_bf6fb737-e78f-496c-9ae3-5067c4300f62/registry-server/0.log"
Dec 05 12:42:55 crc kubenswrapper[4728]: I1205 12:42:55.702007 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 12:42:55 crc kubenswrapper[4728]: I1205 12:42:55.702611 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 12:43:25 crc kubenswrapper[4728]: I1205 12:43:25.702267 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 12:43:25 crc kubenswrapper[4728]: I1205 12:43:25.702747 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 12:43:49 crc kubenswrapper[4728]: I1205 12:43:49.110556 4728 generic.go:334] "Generic (PLEG): container finished" podID="32666438-f1e1-4418-bcf4-53164c4fe660" containerID="f709369c59864a6ce41544494e079f85d9cb4c41867550146b25753d3a4f93b3" exitCode=0
Dec 05 12:43:49 crc kubenswrapper[4728]: I1205 12:43:49.111123 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-58b87/must-gather-2nh47" event={"ID":"32666438-f1e1-4418-bcf4-53164c4fe660","Type":"ContainerDied","Data":"f709369c59864a6ce41544494e079f85d9cb4c41867550146b25753d3a4f93b3"}
Dec 05 12:43:49 crc kubenswrapper[4728]: I1205 12:43:49.113008 4728 scope.go:117] "RemoveContainer" containerID="f709369c59864a6ce41544494e079f85d9cb4c41867550146b25753d3a4f93b3"
Dec 05 12:43:50 crc kubenswrapper[4728]: I1205 12:43:50.096693 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-58b87_must-gather-2nh47_32666438-f1e1-4418-bcf4-53164c4fe660/gather/0.log"
Dec 05 12:43:55 crc kubenswrapper[4728]: I1205 12:43:55.702596 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 12:43:55 crc kubenswrapper[4728]: I1205 12:43:55.703439 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 12:43:55 crc kubenswrapper[4728]: I1205 12:43:55.703480 4728 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp"
Dec 05 12:43:55 crc kubenswrapper[4728]: I1205 12:43:55.704646 4728 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"21c4725e91db14016beb508c555e5a7a654dd3ce139344fe17ac8e6fa2ec20ce"} pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 05 12:43:55 crc kubenswrapper[4728]: I1205 12:43:55.704704 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" containerID="cri-o://21c4725e91db14016beb508c555e5a7a654dd3ce139344fe17ac8e6fa2ec20ce" gracePeriod=600
Dec 05 12:43:56 crc kubenswrapper[4728]: I1205 12:43:56.190263 4728 generic.go:334] "Generic (PLEG): container finished" podID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerID="21c4725e91db14016beb508c555e5a7a654dd3ce139344fe17ac8e6fa2ec20ce" exitCode=0
Dec 05 12:43:56 crc kubenswrapper[4728]: I1205 12:43:56.190781 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" event={"ID":"95bfa60b-fcb6-4519-abc5-c25fea50921d","Type":"ContainerDied","Data":"21c4725e91db14016beb508c555e5a7a654dd3ce139344fe17ac8e6fa2ec20ce"}
Dec 05 12:43:56 crc kubenswrapper[4728]: I1205 12:43:56.190923 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" event={"ID":"95bfa60b-fcb6-4519-abc5-c25fea50921d","Type":"ContainerStarted","Data":"237b1d5709363ccdea953ebc83886abd8aa2b488a53efb6bb37e272a595b0a6e"}
Dec 05 12:43:56 crc kubenswrapper[4728]: I1205 12:43:56.191031 4728 scope.go:117] "RemoveContainer" containerID="ae8200e36b953d8a7b1f348f59b89f13d9c5c73ea3665ac570f46d3131918f69"
Dec 05 12:44:02 crc kubenswrapper[4728]: I1205 12:44:02.585270 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-58b87/must-gather-2nh47"]
Dec 05 12:44:02 crc kubenswrapper[4728]: I1205 12:44:02.586059 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-58b87/must-gather-2nh47" podUID="32666438-f1e1-4418-bcf4-53164c4fe660" containerName="copy" containerID="cri-o://7c67675ef4f7d4d83b18dbf3cff292e10bdc25a1fb8ce852dca1d06a88a604db" gracePeriod=2
Dec 05 12:44:02 crc kubenswrapper[4728]: I1205 12:44:02.593768 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-58b87/must-gather-2nh47"]
Dec 05 12:44:03 crc kubenswrapper[4728]: I1205 12:44:03.065302 4728 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-58b87_must-gather-2nh47_32666438-f1e1-4418-bcf4-53164c4fe660/copy/0.log"
Dec 05 12:44:03 crc kubenswrapper[4728]: I1205 12:44:03.066248 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-58b87/must-gather-2nh47"
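The three "Probe failed" records above arrive 30 seconds apart (12:42:55, 12:43:25, 12:43:55) before the kubelet marks the container unhealthy and kills it with gracePeriod=600, a pattern consistent with a liveness probe configured with periodSeconds=30 and failureThreshold=3 (an inference from the timestamps, not something the log states). A minimal Go sketch of the HTTP check such a probe performs; checkLiveness is an illustrative helper, not the kubelet's real prober (which lives in k8s.io/kubernetes/pkg/probe):

package main

import (
	"fmt"
	"net/http"
	"time"
)

// checkLiveness returns nil on a 2xx/3xx response. A refused connection, as
// in the log above, surfaces as a transport error before any status code is
// seen, so it counts as a probe failure.
func checkLiveness(url string) error {
	client := &http.Client{Timeout: 1 * time.Second}
	resp, err := client.Get(url)
	if err != nil {
		return err // e.g. "dial tcp 127.0.0.1:8798: connect: connection refused"
	}
	defer resp.Body.Close()
	if resp.StatusCode < 200 || resp.StatusCode >= 400 {
		return fmt.Errorf("unhealthy: HTTP %d", resp.StatusCode)
	}
	return nil
}

func main() {
	if err := checkLiveness("http://127.0.0.1:8798/health"); err != nil {
		fmt.Println("Liveness probe status=failure output:", err)
	}
}

Once the failure threshold is crossed, the kill is graceful rather than immediate: the 600-second grace period above gives the machine-config-daemon time to shut down before SIGKILL, and the subsequent ContainerDied/ContainerStarted pair records the restart.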
Dec 05 12:44:03 crc kubenswrapper[4728]: I1205 12:44:03.173730 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2crpg\" (UniqueName: \"kubernetes.io/projected/32666438-f1e1-4418-bcf4-53164c4fe660-kube-api-access-2crpg\") pod \"32666438-f1e1-4418-bcf4-53164c4fe660\" (UID: \"32666438-f1e1-4418-bcf4-53164c4fe660\") "
Dec 05 12:44:03 crc kubenswrapper[4728]: I1205 12:44:03.173879 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/32666438-f1e1-4418-bcf4-53164c4fe660-must-gather-output\") pod \"32666438-f1e1-4418-bcf4-53164c4fe660\" (UID: \"32666438-f1e1-4418-bcf4-53164c4fe660\") "
Dec 05 12:44:03 crc kubenswrapper[4728]: I1205 12:44:03.185130 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/32666438-f1e1-4418-bcf4-53164c4fe660-kube-api-access-2crpg" (OuterVolumeSpecName: "kube-api-access-2crpg") pod "32666438-f1e1-4418-bcf4-53164c4fe660" (UID: "32666438-f1e1-4418-bcf4-53164c4fe660"). InnerVolumeSpecName "kube-api-access-2crpg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:44:03 crc kubenswrapper[4728]: I1205 12:44:03.249874 4728 generic.go:334] "Generic (PLEG): container finished" podID="32666438-f1e1-4418-bcf4-53164c4fe660" containerID="7c67675ef4f7d4d83b18dbf3cff292e10bdc25a1fb8ce852dca1d06a88a604db" exitCode=143
Dec 05 12:44:03 crc kubenswrapper[4728]: I1205 12:44:03.249952 4728 scope.go:117] "RemoveContainer" containerID="7c67675ef4f7d4d83b18dbf3cff292e10bdc25a1fb8ce852dca1d06a88a604db"
Dec 05 12:44:03 crc kubenswrapper[4728]: I1205 12:44:03.250069 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-58b87/must-gather-2nh47"
Dec 05 12:44:03 crc kubenswrapper[4728]: I1205 12:44:03.276355 4728 scope.go:117] "RemoveContainer" containerID="f709369c59864a6ce41544494e079f85d9cb4c41867550146b25753d3a4f93b3"
Dec 05 12:44:03 crc kubenswrapper[4728]: I1205 12:44:03.278828 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2crpg\" (UniqueName: \"kubernetes.io/projected/32666438-f1e1-4418-bcf4-53164c4fe660-kube-api-access-2crpg\") on node \"crc\" DevicePath \"\""
Dec 05 12:44:03 crc kubenswrapper[4728]: I1205 12:44:03.414607 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/32666438-f1e1-4418-bcf4-53164c4fe660-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "32666438-f1e1-4418-bcf4-53164c4fe660" (UID: "32666438-f1e1-4418-bcf4-53164c4fe660"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 12:44:03 crc kubenswrapper[4728]: I1205 12:44:03.428895 4728 scope.go:117] "RemoveContainer" containerID="7c67675ef4f7d4d83b18dbf3cff292e10bdc25a1fb8ce852dca1d06a88a604db"
Dec 05 12:44:03 crc kubenswrapper[4728]: E1205 12:44:03.429405 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7c67675ef4f7d4d83b18dbf3cff292e10bdc25a1fb8ce852dca1d06a88a604db\": container with ID starting with 7c67675ef4f7d4d83b18dbf3cff292e10bdc25a1fb8ce852dca1d06a88a604db not found: ID does not exist" containerID="7c67675ef4f7d4d83b18dbf3cff292e10bdc25a1fb8ce852dca1d06a88a604db"
Dec 05 12:44:03 crc kubenswrapper[4728]: I1205 12:44:03.429454 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7c67675ef4f7d4d83b18dbf3cff292e10bdc25a1fb8ce852dca1d06a88a604db"} err="failed to get container status \"7c67675ef4f7d4d83b18dbf3cff292e10bdc25a1fb8ce852dca1d06a88a604db\": rpc error: code = NotFound desc = could not find container \"7c67675ef4f7d4d83b18dbf3cff292e10bdc25a1fb8ce852dca1d06a88a604db\": container with ID starting with 7c67675ef4f7d4d83b18dbf3cff292e10bdc25a1fb8ce852dca1d06a88a604db not found: ID does not exist"
Dec 05 12:44:03 crc kubenswrapper[4728]: I1205 12:44:03.429487 4728 scope.go:117] "RemoveContainer" containerID="f709369c59864a6ce41544494e079f85d9cb4c41867550146b25753d3a4f93b3"
Dec 05 12:44:03 crc kubenswrapper[4728]: E1205 12:44:03.433198 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f709369c59864a6ce41544494e079f85d9cb4c41867550146b25753d3a4f93b3\": container with ID starting with f709369c59864a6ce41544494e079f85d9cb4c41867550146b25753d3a4f93b3 not found: ID does not exist" containerID="f709369c59864a6ce41544494e079f85d9cb4c41867550146b25753d3a4f93b3"
Dec 05 12:44:03 crc kubenswrapper[4728]: I1205 12:44:03.433267 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f709369c59864a6ce41544494e079f85d9cb4c41867550146b25753d3a4f93b3"} err="failed to get container status \"f709369c59864a6ce41544494e079f85d9cb4c41867550146b25753d3a4f93b3\": rpc error: code = NotFound desc = could not find container \"f709369c59864a6ce41544494e079f85d9cb4c41867550146b25753d3a4f93b3\": container with ID starting with f709369c59864a6ce41544494e079f85d9cb4c41867550146b25753d3a4f93b3 not found: ID does not exist"
Dec 05 12:44:03 crc kubenswrapper[4728]: I1205 12:44:03.502557 4728 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/32666438-f1e1-4418-bcf4-53164c4fe660-must-gather-output\") on node \"crc\" DevicePath \"\""
Dec 05 12:44:04 crc kubenswrapper[4728]: I1205 12:44:04.364594 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="32666438-f1e1-4418-bcf4-53164c4fe660" path="/var/lib/kubelet/pods/32666438-f1e1-4418-bcf4-53164c4fe660/volumes"
Dec 05 12:45:00 crc kubenswrapper[4728]: I1205 12:45:00.169240 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415645-jk458"]
Dec 05 12:45:00 crc kubenswrapper[4728]: E1205 12:45:00.171009 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32666438-f1e1-4418-bcf4-53164c4fe660" containerName="copy"
Dec 05 12:45:00 crc kubenswrapper[4728]: I1205 12:45:00.171026 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="32666438-f1e1-4418-bcf4-53164c4fe660" containerName="copy"
podUID="32666438-f1e1-4418-bcf4-53164c4fe660" containerName="copy" Dec 05 12:45:00 crc kubenswrapper[4728]: E1205 12:45:00.171052 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32666438-f1e1-4418-bcf4-53164c4fe660" containerName="gather" Dec 05 12:45:00 crc kubenswrapper[4728]: I1205 12:45:00.171061 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="32666438-f1e1-4418-bcf4-53164c4fe660" containerName="gather" Dec 05 12:45:00 crc kubenswrapper[4728]: E1205 12:45:00.171081 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7381bd29-ef43-445c-8a2c-ac0306ffbc67" containerName="extract-content" Dec 05 12:45:00 crc kubenswrapper[4728]: I1205 12:45:00.171091 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="7381bd29-ef43-445c-8a2c-ac0306ffbc67" containerName="extract-content" Dec 05 12:45:00 crc kubenswrapper[4728]: E1205 12:45:00.171122 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7381bd29-ef43-445c-8a2c-ac0306ffbc67" containerName="registry-server" Dec 05 12:45:00 crc kubenswrapper[4728]: I1205 12:45:00.171127 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="7381bd29-ef43-445c-8a2c-ac0306ffbc67" containerName="registry-server" Dec 05 12:45:00 crc kubenswrapper[4728]: E1205 12:45:00.171153 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7381bd29-ef43-445c-8a2c-ac0306ffbc67" containerName="extract-utilities" Dec 05 12:45:00 crc kubenswrapper[4728]: I1205 12:45:00.171163 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="7381bd29-ef43-445c-8a2c-ac0306ffbc67" containerName="extract-utilities" Dec 05 12:45:00 crc kubenswrapper[4728]: I1205 12:45:00.171400 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="7381bd29-ef43-445c-8a2c-ac0306ffbc67" containerName="registry-server" Dec 05 12:45:00 crc kubenswrapper[4728]: I1205 12:45:00.171416 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="32666438-f1e1-4418-bcf4-53164c4fe660" containerName="copy" Dec 05 12:45:00 crc kubenswrapper[4728]: I1205 12:45:00.171442 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="32666438-f1e1-4418-bcf4-53164c4fe660" containerName="gather" Dec 05 12:45:00 crc kubenswrapper[4728]: I1205 12:45:00.172492 4728 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415645-jk458" Dec 05 12:45:00 crc kubenswrapper[4728]: I1205 12:45:00.179485 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415645-jk458"] Dec 05 12:45:00 crc kubenswrapper[4728]: I1205 12:45:00.179876 4728 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 12:45:00 crc kubenswrapper[4728]: I1205 12:45:00.180089 4728 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 12:45:00 crc kubenswrapper[4728]: I1205 12:45:00.298037 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1fffda5b-cf27-458c-94e4-da2223636157-config-volume\") pod \"collect-profiles-29415645-jk458\" (UID: \"1fffda5b-cf27-458c-94e4-da2223636157\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415645-jk458" Dec 05 12:45:00 crc kubenswrapper[4728]: I1205 12:45:00.298147 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1fffda5b-cf27-458c-94e4-da2223636157-secret-volume\") pod \"collect-profiles-29415645-jk458\" (UID: \"1fffda5b-cf27-458c-94e4-da2223636157\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415645-jk458" Dec 05 12:45:00 crc kubenswrapper[4728]: I1205 12:45:00.298339 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-74zfm\" (UniqueName: \"kubernetes.io/projected/1fffda5b-cf27-458c-94e4-da2223636157-kube-api-access-74zfm\") pod \"collect-profiles-29415645-jk458\" (UID: \"1fffda5b-cf27-458c-94e4-da2223636157\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415645-jk458" Dec 05 12:45:00 crc kubenswrapper[4728]: I1205 12:45:00.400496 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-74zfm\" (UniqueName: \"kubernetes.io/projected/1fffda5b-cf27-458c-94e4-da2223636157-kube-api-access-74zfm\") pod \"collect-profiles-29415645-jk458\" (UID: \"1fffda5b-cf27-458c-94e4-da2223636157\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415645-jk458" Dec 05 12:45:00 crc kubenswrapper[4728]: I1205 12:45:00.400636 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1fffda5b-cf27-458c-94e4-da2223636157-config-volume\") pod \"collect-profiles-29415645-jk458\" (UID: \"1fffda5b-cf27-458c-94e4-da2223636157\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415645-jk458" Dec 05 12:45:00 crc kubenswrapper[4728]: I1205 12:45:00.400715 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1fffda5b-cf27-458c-94e4-da2223636157-secret-volume\") pod \"collect-profiles-29415645-jk458\" (UID: \"1fffda5b-cf27-458c-94e4-da2223636157\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415645-jk458" Dec 05 12:45:00 crc kubenswrapper[4728]: I1205 12:45:00.402126 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1fffda5b-cf27-458c-94e4-da2223636157-config-volume\") pod 
\"collect-profiles-29415645-jk458\" (UID: \"1fffda5b-cf27-458c-94e4-da2223636157\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415645-jk458" Dec 05 12:45:00 crc kubenswrapper[4728]: I1205 12:45:00.409539 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1fffda5b-cf27-458c-94e4-da2223636157-secret-volume\") pod \"collect-profiles-29415645-jk458\" (UID: \"1fffda5b-cf27-458c-94e4-da2223636157\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415645-jk458" Dec 05 12:45:00 crc kubenswrapper[4728]: I1205 12:45:00.418465 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-74zfm\" (UniqueName: \"kubernetes.io/projected/1fffda5b-cf27-458c-94e4-da2223636157-kube-api-access-74zfm\") pod \"collect-profiles-29415645-jk458\" (UID: \"1fffda5b-cf27-458c-94e4-da2223636157\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415645-jk458" Dec 05 12:45:00 crc kubenswrapper[4728]: I1205 12:45:00.502248 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415645-jk458" Dec 05 12:45:00 crc kubenswrapper[4728]: I1205 12:45:00.962988 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415645-jk458"] Dec 05 12:45:01 crc kubenswrapper[4728]: I1205 12:45:01.788222 4728 generic.go:334] "Generic (PLEG): container finished" podID="1fffda5b-cf27-458c-94e4-da2223636157" containerID="04d1cc90268b3a65e05ba2fcf103c247a45895063ae8ae2982d3325053375fa2" exitCode=0 Dec 05 12:45:01 crc kubenswrapper[4728]: I1205 12:45:01.788274 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415645-jk458" event={"ID":"1fffda5b-cf27-458c-94e4-da2223636157","Type":"ContainerDied","Data":"04d1cc90268b3a65e05ba2fcf103c247a45895063ae8ae2982d3325053375fa2"} Dec 05 12:45:01 crc kubenswrapper[4728]: I1205 12:45:01.788329 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415645-jk458" event={"ID":"1fffda5b-cf27-458c-94e4-da2223636157","Type":"ContainerStarted","Data":"ef2e1306fe8c1b594d815968176af3d7318f520cbd58ad067340475d64c22299"} Dec 05 12:45:03 crc kubenswrapper[4728]: I1205 12:45:03.153406 4728 util.go:48] "No ready sandbox for pod can be found. 
Dec 05 12:45:03 crc kubenswrapper[4728]: I1205 12:45:03.232695 4728 scope.go:117] "RemoveContainer" containerID="150f9b0aae85b31cbca680f39a5e5722a3039f3b7ce0d2ba542d92cab1bc0ba7"
Dec 05 12:45:03 crc kubenswrapper[4728]: I1205 12:45:03.264731 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-74zfm\" (UniqueName: \"kubernetes.io/projected/1fffda5b-cf27-458c-94e4-da2223636157-kube-api-access-74zfm\") pod \"1fffda5b-cf27-458c-94e4-da2223636157\" (UID: \"1fffda5b-cf27-458c-94e4-da2223636157\") "
Dec 05 12:45:03 crc kubenswrapper[4728]: I1205 12:45:03.264921 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1fffda5b-cf27-458c-94e4-da2223636157-secret-volume\") pod \"1fffda5b-cf27-458c-94e4-da2223636157\" (UID: \"1fffda5b-cf27-458c-94e4-da2223636157\") "
Dec 05 12:45:03 crc kubenswrapper[4728]: I1205 12:45:03.265070 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1fffda5b-cf27-458c-94e4-da2223636157-config-volume\") pod \"1fffda5b-cf27-458c-94e4-da2223636157\" (UID: \"1fffda5b-cf27-458c-94e4-da2223636157\") "
Dec 05 12:45:03 crc kubenswrapper[4728]: I1205 12:45:03.266619 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1fffda5b-cf27-458c-94e4-da2223636157-config-volume" (OuterVolumeSpecName: "config-volume") pod "1fffda5b-cf27-458c-94e4-da2223636157" (UID: "1fffda5b-cf27-458c-94e4-da2223636157"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:45:03 crc kubenswrapper[4728]: I1205 12:45:03.271182 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1fffda5b-cf27-458c-94e4-da2223636157-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "1fffda5b-cf27-458c-94e4-da2223636157" (UID: "1fffda5b-cf27-458c-94e4-da2223636157"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:45:03 crc kubenswrapper[4728]: I1205 12:45:03.271198 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1fffda5b-cf27-458c-94e4-da2223636157-kube-api-access-74zfm" (OuterVolumeSpecName: "kube-api-access-74zfm") pod "1fffda5b-cf27-458c-94e4-da2223636157" (UID: "1fffda5b-cf27-458c-94e4-da2223636157"). InnerVolumeSpecName "kube-api-access-74zfm". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:45:03 crc kubenswrapper[4728]: I1205 12:45:03.367501 4728 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1fffda5b-cf27-458c-94e4-da2223636157-secret-volume\") on node \"crc\" DevicePath \"\""
Dec 05 12:45:03 crc kubenswrapper[4728]: I1205 12:45:03.367541 4728 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1fffda5b-cf27-458c-94e4-da2223636157-config-volume\") on node \"crc\" DevicePath \"\""
Dec 05 12:45:03 crc kubenswrapper[4728]: I1205 12:45:03.367556 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-74zfm\" (UniqueName: \"kubernetes.io/projected/1fffda5b-cf27-458c-94e4-da2223636157-kube-api-access-74zfm\") on node \"crc\" DevicePath \"\""
Dec 05 12:45:03 crc kubenswrapper[4728]: I1205 12:45:03.782405 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-xbvx2"]
Dec 05 12:45:03 crc kubenswrapper[4728]: E1205 12:45:03.783307 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fffda5b-cf27-458c-94e4-da2223636157" containerName="collect-profiles"
Dec 05 12:45:03 crc kubenswrapper[4728]: I1205 12:45:03.783333 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fffda5b-cf27-458c-94e4-da2223636157" containerName="collect-profiles"
Dec 05 12:45:03 crc kubenswrapper[4728]: I1205 12:45:03.783598 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="1fffda5b-cf27-458c-94e4-da2223636157" containerName="collect-profiles"
Dec 05 12:45:03 crc kubenswrapper[4728]: I1205 12:45:03.785749 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xbvx2"
Dec 05 12:45:03 crc kubenswrapper[4728]: I1205 12:45:03.794174 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xbvx2"]
Dec 05 12:45:03 crc kubenswrapper[4728]: I1205 12:45:03.824935 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415645-jk458" event={"ID":"1fffda5b-cf27-458c-94e4-da2223636157","Type":"ContainerDied","Data":"ef2e1306fe8c1b594d815968176af3d7318f520cbd58ad067340475d64c22299"}
Dec 05 12:45:03 crc kubenswrapper[4728]: I1205 12:45:03.824982 4728 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ef2e1306fe8c1b594d815968176af3d7318f520cbd58ad067340475d64c22299"
Dec 05 12:45:03 crc kubenswrapper[4728]: I1205 12:45:03.825071 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415645-jk458"
Dec 05 12:45:03 crc kubenswrapper[4728]: I1205 12:45:03.878651 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d6a87a70-52b0-457f-9526-a14e342fb0a9-utilities\") pod \"community-operators-xbvx2\" (UID: \"d6a87a70-52b0-457f-9526-a14e342fb0a9\") " pod="openshift-marketplace/community-operators-xbvx2"
Dec 05 12:45:03 crc kubenswrapper[4728]: I1205 12:45:03.878711 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d6a87a70-52b0-457f-9526-a14e342fb0a9-catalog-content\") pod \"community-operators-xbvx2\" (UID: \"d6a87a70-52b0-457f-9526-a14e342fb0a9\") " pod="openshift-marketplace/community-operators-xbvx2"
Dec 05 12:45:03 crc kubenswrapper[4728]: I1205 12:45:03.878819 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x68tz\" (UniqueName: \"kubernetes.io/projected/d6a87a70-52b0-457f-9526-a14e342fb0a9-kube-api-access-x68tz\") pod \"community-operators-xbvx2\" (UID: \"d6a87a70-52b0-457f-9526-a14e342fb0a9\") " pod="openshift-marketplace/community-operators-xbvx2"
Dec 05 12:45:03 crc kubenswrapper[4728]: I1205 12:45:03.980058 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d6a87a70-52b0-457f-9526-a14e342fb0a9-catalog-content\") pod \"community-operators-xbvx2\" (UID: \"d6a87a70-52b0-457f-9526-a14e342fb0a9\") " pod="openshift-marketplace/community-operators-xbvx2"
Dec 05 12:45:03 crc kubenswrapper[4728]: I1205 12:45:03.980172 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x68tz\" (UniqueName: \"kubernetes.io/projected/d6a87a70-52b0-457f-9526-a14e342fb0a9-kube-api-access-x68tz\") pod \"community-operators-xbvx2\" (UID: \"d6a87a70-52b0-457f-9526-a14e342fb0a9\") " pod="openshift-marketplace/community-operators-xbvx2"
Dec 05 12:45:03 crc kubenswrapper[4728]: I1205 12:45:03.980275 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d6a87a70-52b0-457f-9526-a14e342fb0a9-utilities\") pod \"community-operators-xbvx2\" (UID: \"d6a87a70-52b0-457f-9526-a14e342fb0a9\") " pod="openshift-marketplace/community-operators-xbvx2"
Dec 05 12:45:03 crc kubenswrapper[4728]: I1205 12:45:03.980760 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d6a87a70-52b0-457f-9526-a14e342fb0a9-utilities\") pod \"community-operators-xbvx2\" (UID: \"d6a87a70-52b0-457f-9526-a14e342fb0a9\") " pod="openshift-marketplace/community-operators-xbvx2"
Dec 05 12:45:03 crc kubenswrapper[4728]: I1205 12:45:03.980884 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d6a87a70-52b0-457f-9526-a14e342fb0a9-catalog-content\") pod \"community-operators-xbvx2\" (UID: \"d6a87a70-52b0-457f-9526-a14e342fb0a9\") " pod="openshift-marketplace/community-operators-xbvx2"
Dec 05 12:45:04 crc kubenswrapper[4728]: I1205 12:45:04.008178 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x68tz\" (UniqueName: \"kubernetes.io/projected/d6a87a70-52b0-457f-9526-a14e342fb0a9-kube-api-access-x68tz\") pod \"community-operators-xbvx2\" (UID: \"d6a87a70-52b0-457f-9526-a14e342fb0a9\") " pod="openshift-marketplace/community-operators-xbvx2"
Dec 05 12:45:04 crc kubenswrapper[4728]: I1205 12:45:04.103052 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xbvx2"
Dec 05 12:45:04 crc kubenswrapper[4728]: I1205 12:45:04.285352 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415600-kbdcj"]
Dec 05 12:45:04 crc kubenswrapper[4728]: I1205 12:45:04.303233 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415600-kbdcj"]
Dec 05 12:45:04 crc kubenswrapper[4728]: I1205 12:45:04.394127 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d9578ce-cff6-4087-bb68-c74eaf3dccdf" path="/var/lib/kubelet/pods/9d9578ce-cff6-4087-bb68-c74eaf3dccdf/volumes"
Dec 05 12:45:04 crc kubenswrapper[4728]: I1205 12:45:04.792608 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xbvx2"]
Dec 05 12:45:04 crc kubenswrapper[4728]: I1205 12:45:04.835069 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xbvx2" event={"ID":"d6a87a70-52b0-457f-9526-a14e342fb0a9","Type":"ContainerStarted","Data":"b2b634974195e81b50c295ff2586fe08f67938447e53a799f20ab3a9f3b920d2"}
Dec 05 12:45:05 crc kubenswrapper[4728]: I1205 12:45:05.846526 4728 generic.go:334] "Generic (PLEG): container finished" podID="d6a87a70-52b0-457f-9526-a14e342fb0a9" containerID="8f1f33c842b52fc3b4a004595c374c8cefa2983ffb46b62a705fcde2ea01183e" exitCode=0
Dec 05 12:45:05 crc kubenswrapper[4728]: I1205 12:45:05.846608 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xbvx2" event={"ID":"d6a87a70-52b0-457f-9526-a14e342fb0a9","Type":"ContainerDied","Data":"8f1f33c842b52fc3b4a004595c374c8cefa2983ffb46b62a705fcde2ea01183e"}
Dec 05 12:45:05 crc kubenswrapper[4728]: I1205 12:45:05.849329 4728 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 05 12:45:07 crc kubenswrapper[4728]: I1205 12:45:07.868992 4728 generic.go:334] "Generic (PLEG): container finished" podID="d6a87a70-52b0-457f-9526-a14e342fb0a9" containerID="1005d0135f91ab009c6a890dd02d06f9fd3310d2868336e67fe0dc5e04d0aace" exitCode=0
Dec 05 12:45:07 crc kubenswrapper[4728]: I1205 12:45:07.869053 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xbvx2" event={"ID":"d6a87a70-52b0-457f-9526-a14e342fb0a9","Type":"ContainerDied","Data":"1005d0135f91ab009c6a890dd02d06f9fd3310d2868336e67fe0dc5e04d0aace"}
Dec 05 12:45:08 crc kubenswrapper[4728]: I1205 12:45:08.879966 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xbvx2" event={"ID":"d6a87a70-52b0-457f-9526-a14e342fb0a9","Type":"ContainerStarted","Data":"4235df3b933458aeb2d232ae412fc9da4a258a82de15028daf04e4c015b7fd94"}
Dec 05 12:45:08 crc kubenswrapper[4728]: I1205 12:45:08.896672 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-xbvx2" podStartSLOduration=3.45377429 podStartE2EDuration="5.896650694s" podCreationTimestamp="2025-12-05 12:45:03 +0000 UTC" firstStartedPulling="2025-12-05 12:45:05.849022265 +0000 UTC m=+5839.991144958" lastFinishedPulling="2025-12-05 12:45:08.291898659 +0000 UTC m=+5842.434021362" observedRunningTime="2025-12-05 12:45:08.895970895 +0000 UTC m=+5843.038093608" watchObservedRunningTime="2025-12-05 12:45:08.896650694 +0000 UTC m=+5843.038773397"
Dec 05 12:45:14 crc kubenswrapper[4728]: I1205 12:45:14.104728 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-xbvx2"
Dec 05 12:45:14 crc kubenswrapper[4728]: I1205 12:45:14.105358 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-xbvx2"
Dec 05 12:45:14 crc kubenswrapper[4728]: I1205 12:45:14.193388 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-xbvx2"
Dec 05 12:45:14 crc kubenswrapper[4728]: I1205 12:45:14.978119 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-xbvx2"
Dec 05 12:45:15 crc kubenswrapper[4728]: I1205 12:45:15.028620 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xbvx2"]
Dec 05 12:45:16 crc kubenswrapper[4728]: I1205 12:45:16.946722 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-xbvx2" podUID="d6a87a70-52b0-457f-9526-a14e342fb0a9" containerName="registry-server" containerID="cri-o://4235df3b933458aeb2d232ae412fc9da4a258a82de15028daf04e4c015b7fd94" gracePeriod=2
Dec 05 12:45:17 crc kubenswrapper[4728]: I1205 12:45:17.581898 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xbvx2"
Dec 05 12:45:17 crc kubenswrapper[4728]: I1205 12:45:17.764734 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d6a87a70-52b0-457f-9526-a14e342fb0a9-utilities\") pod \"d6a87a70-52b0-457f-9526-a14e342fb0a9\" (UID: \"d6a87a70-52b0-457f-9526-a14e342fb0a9\") "
Dec 05 12:45:17 crc kubenswrapper[4728]: I1205 12:45:17.764946 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d6a87a70-52b0-457f-9526-a14e342fb0a9-catalog-content\") pod \"d6a87a70-52b0-457f-9526-a14e342fb0a9\" (UID: \"d6a87a70-52b0-457f-9526-a14e342fb0a9\") "
Dec 05 12:45:17 crc kubenswrapper[4728]: I1205 12:45:17.764993 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x68tz\" (UniqueName: \"kubernetes.io/projected/d6a87a70-52b0-457f-9526-a14e342fb0a9-kube-api-access-x68tz\") pod \"d6a87a70-52b0-457f-9526-a14e342fb0a9\" (UID: \"d6a87a70-52b0-457f-9526-a14e342fb0a9\") "
Dec 05 12:45:17 crc kubenswrapper[4728]: I1205 12:45:17.766512 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d6a87a70-52b0-457f-9526-a14e342fb0a9-utilities" (OuterVolumeSpecName: "utilities") pod "d6a87a70-52b0-457f-9526-a14e342fb0a9" (UID: "d6a87a70-52b0-457f-9526-a14e342fb0a9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 12:45:17 crc kubenswrapper[4728]: I1205 12:45:17.774911 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d6a87a70-52b0-457f-9526-a14e342fb0a9-kube-api-access-x68tz" (OuterVolumeSpecName: "kube-api-access-x68tz") pod "d6a87a70-52b0-457f-9526-a14e342fb0a9" (UID: "d6a87a70-52b0-457f-9526-a14e342fb0a9"). InnerVolumeSpecName "kube-api-access-x68tz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:45:17 crc kubenswrapper[4728]: I1205 12:45:17.823893 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d6a87a70-52b0-457f-9526-a14e342fb0a9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d6a87a70-52b0-457f-9526-a14e342fb0a9" (UID: "d6a87a70-52b0-457f-9526-a14e342fb0a9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 12:45:17 crc kubenswrapper[4728]: I1205 12:45:17.867214 4728 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d6a87a70-52b0-457f-9526-a14e342fb0a9-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 12:45:17 crc kubenswrapper[4728]: I1205 12:45:17.867257 4728 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d6a87a70-52b0-457f-9526-a14e342fb0a9-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 12:45:17 crc kubenswrapper[4728]: I1205 12:45:17.867274 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x68tz\" (UniqueName: \"kubernetes.io/projected/d6a87a70-52b0-457f-9526-a14e342fb0a9-kube-api-access-x68tz\") on node \"crc\" DevicePath \"\""
Dec 05 12:45:17 crc kubenswrapper[4728]: I1205 12:45:17.958258 4728 generic.go:334] "Generic (PLEG): container finished" podID="d6a87a70-52b0-457f-9526-a14e342fb0a9" containerID="4235df3b933458aeb2d232ae412fc9da4a258a82de15028daf04e4c015b7fd94" exitCode=0
Dec 05 12:45:17 crc kubenswrapper[4728]: I1205 12:45:17.958302 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xbvx2" event={"ID":"d6a87a70-52b0-457f-9526-a14e342fb0a9","Type":"ContainerDied","Data":"4235df3b933458aeb2d232ae412fc9da4a258a82de15028daf04e4c015b7fd94"}
Dec 05 12:45:17 crc kubenswrapper[4728]: I1205 12:45:17.958328 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xbvx2" event={"ID":"d6a87a70-52b0-457f-9526-a14e342fb0a9","Type":"ContainerDied","Data":"b2b634974195e81b50c295ff2586fe08f67938447e53a799f20ab3a9f3b920d2"}
Dec 05 12:45:17 crc kubenswrapper[4728]: I1205 12:45:17.958345 4728 scope.go:117] "RemoveContainer" containerID="4235df3b933458aeb2d232ae412fc9da4a258a82de15028daf04e4c015b7fd94"
Dec 05 12:45:17 crc kubenswrapper[4728]: I1205 12:45:17.958475 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xbvx2"
Dec 05 12:45:18 crc kubenswrapper[4728]: I1205 12:45:18.001401 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xbvx2"]
Dec 05 12:45:18 crc kubenswrapper[4728]: I1205 12:45:18.003616 4728 scope.go:117] "RemoveContainer" containerID="1005d0135f91ab009c6a890dd02d06f9fd3310d2868336e67fe0dc5e04d0aace"
Dec 05 12:45:18 crc kubenswrapper[4728]: I1205 12:45:18.012600 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-xbvx2"]
Dec 05 12:45:18 crc kubenswrapper[4728]: I1205 12:45:18.026304 4728 scope.go:117] "RemoveContainer" containerID="8f1f33c842b52fc3b4a004595c374c8cefa2983ffb46b62a705fcde2ea01183e"
Dec 05 12:45:18 crc kubenswrapper[4728]: I1205 12:45:18.087122 4728 scope.go:117] "RemoveContainer" containerID="4235df3b933458aeb2d232ae412fc9da4a258a82de15028daf04e4c015b7fd94"
Dec 05 12:45:18 crc kubenswrapper[4728]: E1205 12:45:18.087612 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4235df3b933458aeb2d232ae412fc9da4a258a82de15028daf04e4c015b7fd94\": container with ID starting with 4235df3b933458aeb2d232ae412fc9da4a258a82de15028daf04e4c015b7fd94 not found: ID does not exist" containerID="4235df3b933458aeb2d232ae412fc9da4a258a82de15028daf04e4c015b7fd94"
Dec 05 12:45:18 crc kubenswrapper[4728]: I1205 12:45:18.087655 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4235df3b933458aeb2d232ae412fc9da4a258a82de15028daf04e4c015b7fd94"} err="failed to get container status \"4235df3b933458aeb2d232ae412fc9da4a258a82de15028daf04e4c015b7fd94\": rpc error: code = NotFound desc = could not find container \"4235df3b933458aeb2d232ae412fc9da4a258a82de15028daf04e4c015b7fd94\": container with ID starting with 4235df3b933458aeb2d232ae412fc9da4a258a82de15028daf04e4c015b7fd94 not found: ID does not exist"
Dec 05 12:45:18 crc kubenswrapper[4728]: I1205 12:45:18.087678 4728 scope.go:117] "RemoveContainer" containerID="1005d0135f91ab009c6a890dd02d06f9fd3310d2868336e67fe0dc5e04d0aace"
Dec 05 12:45:18 crc kubenswrapper[4728]: E1205 12:45:18.088399 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1005d0135f91ab009c6a890dd02d06f9fd3310d2868336e67fe0dc5e04d0aace\": container with ID starting with 1005d0135f91ab009c6a890dd02d06f9fd3310d2868336e67fe0dc5e04d0aace not found: ID does not exist" containerID="1005d0135f91ab009c6a890dd02d06f9fd3310d2868336e67fe0dc5e04d0aace"
Dec 05 12:45:18 crc kubenswrapper[4728]: I1205 12:45:18.088439 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1005d0135f91ab009c6a890dd02d06f9fd3310d2868336e67fe0dc5e04d0aace"} err="failed to get container status \"1005d0135f91ab009c6a890dd02d06f9fd3310d2868336e67fe0dc5e04d0aace\": rpc error: code = NotFound desc = could not find container \"1005d0135f91ab009c6a890dd02d06f9fd3310d2868336e67fe0dc5e04d0aace\": container with ID starting with 1005d0135f91ab009c6a890dd02d06f9fd3310d2868336e67fe0dc5e04d0aace not found: ID does not exist"
Dec 05 12:45:18 crc kubenswrapper[4728]: I1205 12:45:18.088460 4728 scope.go:117] "RemoveContainer" containerID="8f1f33c842b52fc3b4a004595c374c8cefa2983ffb46b62a705fcde2ea01183e"
Dec 05 12:45:18 crc kubenswrapper[4728]: E1205 12:45:18.088785 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8f1f33c842b52fc3b4a004595c374c8cefa2983ffb46b62a705fcde2ea01183e\": container with ID starting with 8f1f33c842b52fc3b4a004595c374c8cefa2983ffb46b62a705fcde2ea01183e not found: ID does not exist" containerID="8f1f33c842b52fc3b4a004595c374c8cefa2983ffb46b62a705fcde2ea01183e"
Dec 05 12:45:18 crc kubenswrapper[4728]: I1205 12:45:18.088813 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f1f33c842b52fc3b4a004595c374c8cefa2983ffb46b62a705fcde2ea01183e"} err="failed to get container status \"8f1f33c842b52fc3b4a004595c374c8cefa2983ffb46b62a705fcde2ea01183e\": rpc error: code = NotFound desc = could not find container \"8f1f33c842b52fc3b4a004595c374c8cefa2983ffb46b62a705fcde2ea01183e\": container with ID starting with 8f1f33c842b52fc3b4a004595c374c8cefa2983ffb46b62a705fcde2ea01183e not found: ID does not exist"
Dec 05 12:45:18 crc kubenswrapper[4728]: I1205 12:45:18.366959 4728 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d6a87a70-52b0-457f-9526-a14e342fb0a9" path="/var/lib/kubelet/pods/d6a87a70-52b0-457f-9526-a14e342fb0a9/volumes"
Dec 05 12:46:03 crc kubenswrapper[4728]: I1205 12:46:03.301555 4728 scope.go:117] "RemoveContainer" containerID="9af8c06014f09e163c61dd3acb8f4b0e45bd585b6871919021fc6aa2bffc606c"
Dec 05 12:46:25 crc kubenswrapper[4728]: I1205 12:46:25.702469 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 12:46:25 crc kubenswrapper[4728]: I1205 12:46:25.703085 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 12:46:37 crc kubenswrapper[4728]: I1205 12:46:37.640553 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-wzmlh"]
Dec 05 12:46:37 crc kubenswrapper[4728]: E1205 12:46:37.642095 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6a87a70-52b0-457f-9526-a14e342fb0a9" containerName="registry-server"
Dec 05 12:46:37 crc kubenswrapper[4728]: I1205 12:46:37.642114 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6a87a70-52b0-457f-9526-a14e342fb0a9" containerName="registry-server"
Dec 05 12:46:37 crc kubenswrapper[4728]: E1205 12:46:37.642135 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6a87a70-52b0-457f-9526-a14e342fb0a9" containerName="extract-content"
Dec 05 12:46:37 crc kubenswrapper[4728]: I1205 12:46:37.642141 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6a87a70-52b0-457f-9526-a14e342fb0a9" containerName="extract-content"
Dec 05 12:46:37 crc kubenswrapper[4728]: E1205 12:46:37.642194 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6a87a70-52b0-457f-9526-a14e342fb0a9" containerName="extract-utilities"
Dec 05 12:46:37 crc kubenswrapper[4728]: I1205 12:46:37.642200 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6a87a70-52b0-457f-9526-a14e342fb0a9" containerName="extract-utilities"
Dec 05 12:46:37 crc kubenswrapper[4728]: I1205 12:46:37.642446 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="d6a87a70-52b0-457f-9526-a14e342fb0a9" containerName="registry-server"
Dec 05 12:46:37 crc kubenswrapper[4728]: I1205 12:46:37.644641 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wzmlh"
Dec 05 12:46:37 crc kubenswrapper[4728]: I1205 12:46:37.650222 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wzmlh"]
Dec 05 12:46:37 crc kubenswrapper[4728]: I1205 12:46:37.794890 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9jkzr\" (UniqueName: \"kubernetes.io/projected/d26dcd31-1801-455c-a5bb-ef444abca2c6-kube-api-access-9jkzr\") pod \"redhat-marketplace-wzmlh\" (UID: \"d26dcd31-1801-455c-a5bb-ef444abca2c6\") " pod="openshift-marketplace/redhat-marketplace-wzmlh"
Dec 05 12:46:37 crc kubenswrapper[4728]: I1205 12:46:37.795070 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d26dcd31-1801-455c-a5bb-ef444abca2c6-utilities\") pod \"redhat-marketplace-wzmlh\" (UID: \"d26dcd31-1801-455c-a5bb-ef444abca2c6\") " pod="openshift-marketplace/redhat-marketplace-wzmlh"
Dec 05 12:46:37 crc kubenswrapper[4728]: I1205 12:46:37.795094 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d26dcd31-1801-455c-a5bb-ef444abca2c6-catalog-content\") pod \"redhat-marketplace-wzmlh\" (UID: \"d26dcd31-1801-455c-a5bb-ef444abca2c6\") " pod="openshift-marketplace/redhat-marketplace-wzmlh"
Dec 05 12:46:37 crc kubenswrapper[4728]: I1205 12:46:37.896837 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9jkzr\" (UniqueName: \"kubernetes.io/projected/d26dcd31-1801-455c-a5bb-ef444abca2c6-kube-api-access-9jkzr\") pod \"redhat-marketplace-wzmlh\" (UID: \"d26dcd31-1801-455c-a5bb-ef444abca2c6\") " pod="openshift-marketplace/redhat-marketplace-wzmlh"
Dec 05 12:46:37 crc kubenswrapper[4728]: I1205 12:46:37.896998 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d26dcd31-1801-455c-a5bb-ef444abca2c6-utilities\") pod \"redhat-marketplace-wzmlh\" (UID: \"d26dcd31-1801-455c-a5bb-ef444abca2c6\") " pod="openshift-marketplace/redhat-marketplace-wzmlh"
Dec 05 12:46:37 crc kubenswrapper[4728]: I1205 12:46:37.897022 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d26dcd31-1801-455c-a5bb-ef444abca2c6-catalog-content\") pod \"redhat-marketplace-wzmlh\" (UID: \"d26dcd31-1801-455c-a5bb-ef444abca2c6\") " pod="openshift-marketplace/redhat-marketplace-wzmlh"
Dec 05 12:46:37 crc kubenswrapper[4728]: I1205 12:46:37.897663 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d26dcd31-1801-455c-a5bb-ef444abca2c6-utilities\") pod \"redhat-marketplace-wzmlh\" (UID: \"d26dcd31-1801-455c-a5bb-ef444abca2c6\") " pod="openshift-marketplace/redhat-marketplace-wzmlh"
Dec 05 12:46:37 crc kubenswrapper[4728]: I1205 12:46:37.897716 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d26dcd31-1801-455c-a5bb-ef444abca2c6-catalog-content\") pod \"redhat-marketplace-wzmlh\" (UID: \"d26dcd31-1801-455c-a5bb-ef444abca2c6\") " pod="openshift-marketplace/redhat-marketplace-wzmlh"
Dec 05 12:46:37 crc kubenswrapper[4728]: I1205 12:46:37.918633 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9jkzr\" (UniqueName: \"kubernetes.io/projected/d26dcd31-1801-455c-a5bb-ef444abca2c6-kube-api-access-9jkzr\") pod \"redhat-marketplace-wzmlh\" (UID: \"d26dcd31-1801-455c-a5bb-ef444abca2c6\") " pod="openshift-marketplace/redhat-marketplace-wzmlh"
Dec 05 12:46:37 crc kubenswrapper[4728]: I1205 12:46:37.964936 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wzmlh"
Dec 05 12:46:38 crc kubenswrapper[4728]: I1205 12:46:38.487690 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wzmlh"]
Dec 05 12:46:38 crc kubenswrapper[4728]: I1205 12:46:38.759997 4728 generic.go:334] "Generic (PLEG): container finished" podID="d26dcd31-1801-455c-a5bb-ef444abca2c6" containerID="8b8698545e21fb05340ecb2bb403107a1ec3e7479f3a631a79642f30681f0d99" exitCode=0
Dec 05 12:46:38 crc kubenswrapper[4728]: I1205 12:46:38.760055 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wzmlh" event={"ID":"d26dcd31-1801-455c-a5bb-ef444abca2c6","Type":"ContainerDied","Data":"8b8698545e21fb05340ecb2bb403107a1ec3e7479f3a631a79642f30681f0d99"}
Dec 05 12:46:38 crc kubenswrapper[4728]: I1205 12:46:38.760088 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wzmlh" event={"ID":"d26dcd31-1801-455c-a5bb-ef444abca2c6","Type":"ContainerStarted","Data":"a6f23ff268e9b4703446cf598c16d4ed3db94ee1594b2333e6159a0701eb34f5"}
Dec 05 12:46:39 crc kubenswrapper[4728]: I1205 12:46:39.771021 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wzmlh" event={"ID":"d26dcd31-1801-455c-a5bb-ef444abca2c6","Type":"ContainerStarted","Data":"26218094acf564a9ef6d1b221085771fee3295ff554ed4fab8297be159c0cf77"}
Dec 05 12:46:40 crc kubenswrapper[4728]: I1205 12:46:40.785411 4728 generic.go:334] "Generic (PLEG): container finished" podID="d26dcd31-1801-455c-a5bb-ef444abca2c6" containerID="26218094acf564a9ef6d1b221085771fee3295ff554ed4fab8297be159c0cf77" exitCode=0
Dec 05 12:46:40 crc kubenswrapper[4728]: I1205 12:46:40.785537 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wzmlh" event={"ID":"d26dcd31-1801-455c-a5bb-ef444abca2c6","Type":"ContainerDied","Data":"26218094acf564a9ef6d1b221085771fee3295ff554ed4fab8297be159c0cf77"}
Dec 05 12:46:40 crc kubenswrapper[4728]: I1205 12:46:40.786212 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wzmlh" event={"ID":"d26dcd31-1801-455c-a5bb-ef444abca2c6","Type":"ContainerStarted","Data":"54d503bb3aae1145351628e0f3b944fd249d633a2eb06cf583da6d6d06940e30"}
Dec 05 12:46:40 crc kubenswrapper[4728]: I1205 12:46:40.809647 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-wzmlh" podStartSLOduration=2.179352929 podStartE2EDuration="3.809628513s" podCreationTimestamp="2025-12-05 12:46:37 +0000 UTC" firstStartedPulling="2025-12-05 12:46:38.761355339 +0000 UTC m=+5932.903478032" lastFinishedPulling="2025-12-05 12:46:40.391630923 +0000 UTC m=+5934.533753616" observedRunningTime="2025-12-05 12:46:40.802524713 +0000 UTC m=+5934.944647426" watchObservedRunningTime="2025-12-05 12:46:40.809628513 +0000 UTC m=+5934.951751206"
Dec 05 12:46:47 crc kubenswrapper[4728]: I1205 12:46:47.965625 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-wzmlh"
Dec 05 12:46:47 crc kubenswrapper[4728]: I1205 12:46:47.966256 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-wzmlh"
Dec 05 12:46:48 crc kubenswrapper[4728]: I1205 12:46:48.015894 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-wzmlh"
Dec 05 12:46:48 crc kubenswrapper[4728]: I1205 12:46:48.916648 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-wzmlh"
Dec 05 12:46:48 crc kubenswrapper[4728]: I1205 12:46:48.985930 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wzmlh"]
Dec 05 12:46:50 crc kubenswrapper[4728]: I1205 12:46:50.876910 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-wzmlh" podUID="d26dcd31-1801-455c-a5bb-ef444abca2c6" containerName="registry-server" containerID="cri-o://54d503bb3aae1145351628e0f3b944fd249d633a2eb06cf583da6d6d06940e30" gracePeriod=2
Dec 05 12:46:51 crc kubenswrapper[4728]: I1205 12:46:51.360023 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wzmlh"
Dec 05 12:46:51 crc kubenswrapper[4728]: I1205 12:46:51.449627 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9jkzr\" (UniqueName: \"kubernetes.io/projected/d26dcd31-1801-455c-a5bb-ef444abca2c6-kube-api-access-9jkzr\") pod \"d26dcd31-1801-455c-a5bb-ef444abca2c6\" (UID: \"d26dcd31-1801-455c-a5bb-ef444abca2c6\") "
Dec 05 12:46:51 crc kubenswrapper[4728]: I1205 12:46:51.449763 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d26dcd31-1801-455c-a5bb-ef444abca2c6-utilities\") pod \"d26dcd31-1801-455c-a5bb-ef444abca2c6\" (UID: \"d26dcd31-1801-455c-a5bb-ef444abca2c6\") "
Dec 05 12:46:51 crc kubenswrapper[4728]: I1205 12:46:51.449841 4728 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d26dcd31-1801-455c-a5bb-ef444abca2c6-catalog-content\") pod \"d26dcd31-1801-455c-a5bb-ef444abca2c6\" (UID: \"d26dcd31-1801-455c-a5bb-ef444abca2c6\") "
Dec 05 12:46:51 crc kubenswrapper[4728]: I1205 12:46:51.451142 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d26dcd31-1801-455c-a5bb-ef444abca2c6-utilities" (OuterVolumeSpecName: "utilities") pod "d26dcd31-1801-455c-a5bb-ef444abca2c6" (UID: "d26dcd31-1801-455c-a5bb-ef444abca2c6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 12:46:51 crc kubenswrapper[4728]: I1205 12:46:51.459376 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d26dcd31-1801-455c-a5bb-ef444abca2c6-kube-api-access-9jkzr" (OuterVolumeSpecName: "kube-api-access-9jkzr") pod "d26dcd31-1801-455c-a5bb-ef444abca2c6" (UID: "d26dcd31-1801-455c-a5bb-ef444abca2c6"). InnerVolumeSpecName "kube-api-access-9jkzr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:46:51 crc kubenswrapper[4728]: I1205 12:46:51.470373 4728 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d26dcd31-1801-455c-a5bb-ef444abca2c6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d26dcd31-1801-455c-a5bb-ef444abca2c6" (UID: "d26dcd31-1801-455c-a5bb-ef444abca2c6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 12:46:51 crc kubenswrapper[4728]: I1205 12:46:51.551774 4728 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d26dcd31-1801-455c-a5bb-ef444abca2c6-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 12:46:51 crc kubenswrapper[4728]: I1205 12:46:51.551817 4728 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d26dcd31-1801-455c-a5bb-ef444abca2c6-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 12:46:51 crc kubenswrapper[4728]: I1205 12:46:51.551830 4728 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9jkzr\" (UniqueName: \"kubernetes.io/projected/d26dcd31-1801-455c-a5bb-ef444abca2c6-kube-api-access-9jkzr\") on node \"crc\" DevicePath \"\""
Dec 05 12:46:51 crc kubenswrapper[4728]: I1205 12:46:51.888973 4728 generic.go:334] "Generic (PLEG): container finished" podID="d26dcd31-1801-455c-a5bb-ef444abca2c6" containerID="54d503bb3aae1145351628e0f3b944fd249d633a2eb06cf583da6d6d06940e30" exitCode=0
Dec 05 12:46:51 crc kubenswrapper[4728]: I1205 12:46:51.889373 4728 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wzmlh"
Dec 05 12:46:51 crc kubenswrapper[4728]: I1205 12:46:51.889717 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wzmlh" event={"ID":"d26dcd31-1801-455c-a5bb-ef444abca2c6","Type":"ContainerDied","Data":"54d503bb3aae1145351628e0f3b944fd249d633a2eb06cf583da6d6d06940e30"}
Dec 05 12:46:51 crc kubenswrapper[4728]: I1205 12:46:51.890161 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wzmlh" event={"ID":"d26dcd31-1801-455c-a5bb-ef444abca2c6","Type":"ContainerDied","Data":"a6f23ff268e9b4703446cf598c16d4ed3db94ee1594b2333e6159a0701eb34f5"}
Dec 05 12:46:51 crc kubenswrapper[4728]: I1205 12:46:51.890212 4728 scope.go:117] "RemoveContainer" containerID="54d503bb3aae1145351628e0f3b944fd249d633a2eb06cf583da6d6d06940e30"
Dec 05 12:46:51 crc kubenswrapper[4728]: I1205 12:46:51.911916 4728 scope.go:117] "RemoveContainer" containerID="26218094acf564a9ef6d1b221085771fee3295ff554ed4fab8297be159c0cf77"
Dec 05 12:46:51 crc kubenswrapper[4728]: I1205 12:46:51.948844 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wzmlh"]
Dec 05 12:46:51 crc kubenswrapper[4728]: I1205 12:46:51.950292 4728 scope.go:117] "RemoveContainer" containerID="8b8698545e21fb05340ecb2bb403107a1ec3e7479f3a631a79642f30681f0d99"
Dec 05 12:46:51 crc kubenswrapper[4728]: I1205 12:46:51.958314 4728 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-wzmlh"]
Dec 05 12:46:51 crc kubenswrapper[4728]: I1205 12:46:51.982139 4728 scope.go:117] "RemoveContainer" containerID="54d503bb3aae1145351628e0f3b944fd249d633a2eb06cf583da6d6d06940e30"
Dec 05 12:46:51 crc kubenswrapper[4728]: E1205 12:46:51.982781 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"54d503bb3aae1145351628e0f3b944fd249d633a2eb06cf583da6d6d06940e30\": container with ID starting with 54d503bb3aae1145351628e0f3b944fd249d633a2eb06cf583da6d6d06940e30 not found: ID does not exist" containerID="54d503bb3aae1145351628e0f3b944fd249d633a2eb06cf583da6d6d06940e30"
Dec 05 12:46:51 crc kubenswrapper[4728]: I1205 12:46:51.982834 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54d503bb3aae1145351628e0f3b944fd249d633a2eb06cf583da6d6d06940e30"} err="failed to get container status \"54d503bb3aae1145351628e0f3b944fd249d633a2eb06cf583da6d6d06940e30\": rpc error: code = NotFound desc = could not find container \"54d503bb3aae1145351628e0f3b944fd249d633a2eb06cf583da6d6d06940e30\": container with ID starting with 54d503bb3aae1145351628e0f3b944fd249d633a2eb06cf583da6d6d06940e30 not found: ID does not exist"
Dec 05 12:46:51 crc kubenswrapper[4728]: I1205 12:46:51.982860 4728 scope.go:117] "RemoveContainer" containerID="26218094acf564a9ef6d1b221085771fee3295ff554ed4fab8297be159c0cf77"
Dec 05 12:46:51 crc kubenswrapper[4728]: E1205 12:46:51.983198 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"26218094acf564a9ef6d1b221085771fee3295ff554ed4fab8297be159c0cf77\": container with ID starting with 26218094acf564a9ef6d1b221085771fee3295ff554ed4fab8297be159c0cf77 not found: ID does not exist" containerID="26218094acf564a9ef6d1b221085771fee3295ff554ed4fab8297be159c0cf77"
Dec 05 12:46:51 crc kubenswrapper[4728]: I1205 12:46:51.983260 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"26218094acf564a9ef6d1b221085771fee3295ff554ed4fab8297be159c0cf77"} err="failed to get container status \"26218094acf564a9ef6d1b221085771fee3295ff554ed4fab8297be159c0cf77\": rpc error: code = NotFound desc = could not find container \"26218094acf564a9ef6d1b221085771fee3295ff554ed4fab8297be159c0cf77\": container with ID starting with 26218094acf564a9ef6d1b221085771fee3295ff554ed4fab8297be159c0cf77 not found: ID does not exist"
Dec 05 12:46:51 crc kubenswrapper[4728]: I1205 12:46:51.983279 4728 scope.go:117] "RemoveContainer" containerID="8b8698545e21fb05340ecb2bb403107a1ec3e7479f3a631a79642f30681f0d99"
Dec 05 12:46:51 crc kubenswrapper[4728]: E1205 12:46:51.983703 4728 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8b8698545e21fb05340ecb2bb403107a1ec3e7479f3a631a79642f30681f0d99\": container with ID starting with 8b8698545e21fb05340ecb2bb403107a1ec3e7479f3a631a79642f30681f0d99 not found: ID does not exist" containerID="8b8698545e21fb05340ecb2bb403107a1ec3e7479f3a631a79642f30681f0d99"
Dec 05 12:46:51 crc kubenswrapper[4728]: I1205 12:46:51.983750 4728 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8b8698545e21fb05340ecb2bb403107a1ec3e7479f3a631a79642f30681f0d99"} err="failed to get container status \"8b8698545e21fb05340ecb2bb403107a1ec3e7479f3a631a79642f30681f0d99\": rpc error: code = NotFound desc = could not find container \"8b8698545e21fb05340ecb2bb403107a1ec3e7479f3a631a79642f30681f0d99\": container with ID starting with 8b8698545e21fb05340ecb2bb403107a1ec3e7479f3a631a79642f30681f0d99 not found: ID does not exist"
Dec 05 12:46:52 crc kubenswrapper[4728]: I1205 12:46:52.363891 4728 kubelet_volumes.go:163] "Cleaned
up orphaned pod volumes dir" podUID="d26dcd31-1801-455c-a5bb-ef444abca2c6" path="/var/lib/kubelet/pods/d26dcd31-1801-455c-a5bb-ef444abca2c6/volumes" Dec 05 12:46:55 crc kubenswrapper[4728]: I1205 12:46:55.702050 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:46:55 crc kubenswrapper[4728]: I1205 12:46:55.702655 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:47:25 crc kubenswrapper[4728]: I1205 12:47:25.701771 4728 patch_prober.go:28] interesting pod/machine-config-daemon-w8qlp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:47:25 crc kubenswrapper[4728]: I1205 12:47:25.703577 4728 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:47:25 crc kubenswrapper[4728]: I1205 12:47:25.703758 4728 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" Dec 05 12:47:25 crc kubenswrapper[4728]: I1205 12:47:25.704751 4728 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"237b1d5709363ccdea953ebc83886abd8aa2b488a53efb6bb37e272a595b0a6e"} pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 12:47:25 crc kubenswrapper[4728]: I1205 12:47:25.704986 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerName="machine-config-daemon" containerID="cri-o://237b1d5709363ccdea953ebc83886abd8aa2b488a53efb6bb37e272a595b0a6e" gracePeriod=600 Dec 05 12:47:25 crc kubenswrapper[4728]: E1205 12:47:25.829734 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:47:26 crc kubenswrapper[4728]: I1205 12:47:26.241533 4728 generic.go:334] "Generic (PLEG): container finished" podID="95bfa60b-fcb6-4519-abc5-c25fea50921d" containerID="237b1d5709363ccdea953ebc83886abd8aa2b488a53efb6bb37e272a595b0a6e" exitCode=0 Dec 05 12:47:26 crc kubenswrapper[4728]: I1205 12:47:26.241572 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" event={"ID":"95bfa60b-fcb6-4519-abc5-c25fea50921d","Type":"ContainerDied","Data":"237b1d5709363ccdea953ebc83886abd8aa2b488a53efb6bb37e272a595b0a6e"} Dec 05 12:47:26 crc kubenswrapper[4728]: I1205 12:47:26.241666 4728 scope.go:117] "RemoveContainer" containerID="21c4725e91db14016beb508c555e5a7a654dd3ce139344fe17ac8e6fa2ec20ce" Dec 05 12:47:26 crc kubenswrapper[4728]: I1205 12:47:26.242319 4728 scope.go:117] "RemoveContainer" containerID="237b1d5709363ccdea953ebc83886abd8aa2b488a53efb6bb37e272a595b0a6e" Dec 05 12:47:26 crc kubenswrapper[4728]: E1205 12:47:26.242584 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:47:40 crc kubenswrapper[4728]: I1205 12:47:40.352214 4728 scope.go:117] "RemoveContainer" containerID="237b1d5709363ccdea953ebc83886abd8aa2b488a53efb6bb37e272a595b0a6e" Dec 05 12:47:40 crc kubenswrapper[4728]: E1205 12:47:40.353043 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:47:52 crc kubenswrapper[4728]: I1205 12:47:52.351914 4728 scope.go:117] "RemoveContainer" containerID="237b1d5709363ccdea953ebc83886abd8aa2b488a53efb6bb37e272a595b0a6e" Dec 05 12:47:52 crc kubenswrapper[4728]: E1205 12:47:52.352719 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:48:05 crc kubenswrapper[4728]: I1205 12:48:05.351858 4728 scope.go:117] "RemoveContainer" containerID="237b1d5709363ccdea953ebc83886abd8aa2b488a53efb6bb37e272a595b0a6e" Dec 05 12:48:05 crc kubenswrapper[4728]: E1205 12:48:05.355240 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:48:16 crc kubenswrapper[4728]: I1205 12:48:16.231499 4728 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-k2rzs"] Dec 05 12:48:16 crc kubenswrapper[4728]: E1205 12:48:16.232527 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d26dcd31-1801-455c-a5bb-ef444abca2c6" containerName="extract-utilities" Dec 05 12:48:16 crc kubenswrapper[4728]: I1205 12:48:16.232544 4728 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="d26dcd31-1801-455c-a5bb-ef444abca2c6" containerName="extract-utilities" Dec 05 12:48:16 crc kubenswrapper[4728]: E1205 12:48:16.232563 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d26dcd31-1801-455c-a5bb-ef444abca2c6" containerName="registry-server" Dec 05 12:48:16 crc kubenswrapper[4728]: I1205 12:48:16.232572 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="d26dcd31-1801-455c-a5bb-ef444abca2c6" containerName="registry-server" Dec 05 12:48:16 crc kubenswrapper[4728]: E1205 12:48:16.232597 4728 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d26dcd31-1801-455c-a5bb-ef444abca2c6" containerName="extract-content" Dec 05 12:48:16 crc kubenswrapper[4728]: I1205 12:48:16.232607 4728 state_mem.go:107] "Deleted CPUSet assignment" podUID="d26dcd31-1801-455c-a5bb-ef444abca2c6" containerName="extract-content" Dec 05 12:48:16 crc kubenswrapper[4728]: I1205 12:48:16.233069 4728 memory_manager.go:354] "RemoveStaleState removing state" podUID="d26dcd31-1801-455c-a5bb-ef444abca2c6" containerName="registry-server" Dec 05 12:48:16 crc kubenswrapper[4728]: I1205 12:48:16.235539 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-k2rzs" Dec 05 12:48:16 crc kubenswrapper[4728]: I1205 12:48:16.255043 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-k2rzs"] Dec 05 12:48:16 crc kubenswrapper[4728]: I1205 12:48:16.366320 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/97ebb964-f70e-4ead-9ff2-15d499247a3f-catalog-content\") pod \"certified-operators-k2rzs\" (UID: \"97ebb964-f70e-4ead-9ff2-15d499247a3f\") " pod="openshift-marketplace/certified-operators-k2rzs" Dec 05 12:48:16 crc kubenswrapper[4728]: I1205 12:48:16.366394 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/97ebb964-f70e-4ead-9ff2-15d499247a3f-utilities\") pod \"certified-operators-k2rzs\" (UID: \"97ebb964-f70e-4ead-9ff2-15d499247a3f\") " pod="openshift-marketplace/certified-operators-k2rzs" Dec 05 12:48:16 crc kubenswrapper[4728]: I1205 12:48:16.366416 4728 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g6gsp\" (UniqueName: \"kubernetes.io/projected/97ebb964-f70e-4ead-9ff2-15d499247a3f-kube-api-access-g6gsp\") pod \"certified-operators-k2rzs\" (UID: \"97ebb964-f70e-4ead-9ff2-15d499247a3f\") " pod="openshift-marketplace/certified-operators-k2rzs" Dec 05 12:48:16 crc kubenswrapper[4728]: I1205 12:48:16.468579 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/97ebb964-f70e-4ead-9ff2-15d499247a3f-catalog-content\") pod \"certified-operators-k2rzs\" (UID: \"97ebb964-f70e-4ead-9ff2-15d499247a3f\") " pod="openshift-marketplace/certified-operators-k2rzs" Dec 05 12:48:16 crc kubenswrapper[4728]: I1205 12:48:16.468694 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/97ebb964-f70e-4ead-9ff2-15d499247a3f-utilities\") pod \"certified-operators-k2rzs\" (UID: \"97ebb964-f70e-4ead-9ff2-15d499247a3f\") " pod="openshift-marketplace/certified-operators-k2rzs" Dec 05 12:48:16 crc kubenswrapper[4728]: I1205 
12:48:16.468731 4728 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g6gsp\" (UniqueName: \"kubernetes.io/projected/97ebb964-f70e-4ead-9ff2-15d499247a3f-kube-api-access-g6gsp\") pod \"certified-operators-k2rzs\" (UID: \"97ebb964-f70e-4ead-9ff2-15d499247a3f\") " pod="openshift-marketplace/certified-operators-k2rzs" Dec 05 12:48:16 crc kubenswrapper[4728]: I1205 12:48:16.469584 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/97ebb964-f70e-4ead-9ff2-15d499247a3f-catalog-content\") pod \"certified-operators-k2rzs\" (UID: \"97ebb964-f70e-4ead-9ff2-15d499247a3f\") " pod="openshift-marketplace/certified-operators-k2rzs" Dec 05 12:48:16 crc kubenswrapper[4728]: I1205 12:48:16.469604 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/97ebb964-f70e-4ead-9ff2-15d499247a3f-utilities\") pod \"certified-operators-k2rzs\" (UID: \"97ebb964-f70e-4ead-9ff2-15d499247a3f\") " pod="openshift-marketplace/certified-operators-k2rzs" Dec 05 12:48:16 crc kubenswrapper[4728]: I1205 12:48:16.497530 4728 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g6gsp\" (UniqueName: \"kubernetes.io/projected/97ebb964-f70e-4ead-9ff2-15d499247a3f-kube-api-access-g6gsp\") pod \"certified-operators-k2rzs\" (UID: \"97ebb964-f70e-4ead-9ff2-15d499247a3f\") " pod="openshift-marketplace/certified-operators-k2rzs" Dec 05 12:48:16 crc kubenswrapper[4728]: I1205 12:48:16.565217 4728 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-k2rzs" Dec 05 12:48:17 crc kubenswrapper[4728]: I1205 12:48:17.189648 4728 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-k2rzs"] Dec 05 12:48:17 crc kubenswrapper[4728]: I1205 12:48:17.352918 4728 scope.go:117] "RemoveContainer" containerID="237b1d5709363ccdea953ebc83886abd8aa2b488a53efb6bb37e272a595b0a6e" Dec 05 12:48:17 crc kubenswrapper[4728]: E1205 12:48:17.353483 4728 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w8qlp_openshift-machine-config-operator(95bfa60b-fcb6-4519-abc5-c25fea50921d)\"" pod="openshift-machine-config-operator/machine-config-daemon-w8qlp" podUID="95bfa60b-fcb6-4519-abc5-c25fea50921d" Dec 05 12:48:17 crc kubenswrapper[4728]: I1205 12:48:17.759618 4728 generic.go:334] "Generic (PLEG): container finished" podID="97ebb964-f70e-4ead-9ff2-15d499247a3f" containerID="7061d07c501c120370ecb985435e317c1923adafe259a5d56276413171aaf4ff" exitCode=0 Dec 05 12:48:17 crc kubenswrapper[4728]: I1205 12:48:17.759672 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k2rzs" event={"ID":"97ebb964-f70e-4ead-9ff2-15d499247a3f","Type":"ContainerDied","Data":"7061d07c501c120370ecb985435e317c1923adafe259a5d56276413171aaf4ff"} Dec 05 12:48:17 crc kubenswrapper[4728]: I1205 12:48:17.759720 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k2rzs" event={"ID":"97ebb964-f70e-4ead-9ff2-15d499247a3f","Type":"ContainerStarted","Data":"a4ce8190985cbffb5ffa52f8033b7c3a8b5958c60209b391bd1a25984fbe9505"} Dec 05 12:48:18 crc kubenswrapper[4728]: I1205 12:48:18.770811 4728 generic.go:334] 
"Generic (PLEG): container finished" podID="97ebb964-f70e-4ead-9ff2-15d499247a3f" containerID="c0e43c76fd20ea3e572e14fa71d950bde3acec6bf6bf377c72463adf21a83a4c" exitCode=0 Dec 05 12:48:18 crc kubenswrapper[4728]: I1205 12:48:18.770923 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k2rzs" event={"ID":"97ebb964-f70e-4ead-9ff2-15d499247a3f","Type":"ContainerDied","Data":"c0e43c76fd20ea3e572e14fa71d950bde3acec6bf6bf377c72463adf21a83a4c"} Dec 05 12:48:19 crc kubenswrapper[4728]: I1205 12:48:19.781044 4728 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k2rzs" event={"ID":"97ebb964-f70e-4ead-9ff2-15d499247a3f","Type":"ContainerStarted","Data":"b43ac66fd342c9c0341605d095d86c9c8a4c483bef76a5349de1a03ab6ddbd94"} Dec 05 12:48:19 crc kubenswrapper[4728]: I1205 12:48:19.806705 4728 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-k2rzs" podStartSLOduration=2.328317171 podStartE2EDuration="3.806681501s" podCreationTimestamp="2025-12-05 12:48:16 +0000 UTC" firstStartedPulling="2025-12-05 12:48:17.763852954 +0000 UTC m=+6031.905975647" lastFinishedPulling="2025-12-05 12:48:19.242217284 +0000 UTC m=+6033.384339977" observedRunningTime="2025-12-05 12:48:19.799457058 +0000 UTC m=+6033.941579761" watchObservedRunningTime="2025-12-05 12:48:19.806681501 +0000 UTC m=+6033.948804214" Dec 05 12:48:26 crc kubenswrapper[4728]: I1205 12:48:26.565990 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-k2rzs" Dec 05 12:48:26 crc kubenswrapper[4728]: I1205 12:48:26.566511 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-k2rzs" Dec 05 12:48:26 crc kubenswrapper[4728]: I1205 12:48:26.628289 4728 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-k2rzs" Dec 05 12:48:26 crc kubenswrapper[4728]: I1205 12:48:26.881458 4728 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-k2rzs" Dec 05 12:48:26 crc kubenswrapper[4728]: I1205 12:48:26.926360 4728 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-k2rzs"] Dec 05 12:48:28 crc kubenswrapper[4728]: I1205 12:48:28.854543 4728 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-k2rzs" podUID="97ebb964-f70e-4ead-9ff2-15d499247a3f" containerName="registry-server" containerID="cri-o://b43ac66fd342c9c0341605d095d86c9c8a4c483bef76a5349de1a03ab6ddbd94" gracePeriod=2 var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515114552242024446 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015114552243017364 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015114536074016513 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015114536074015463 5ustar corecore